.github/workflows/build.yml 🔗
@@ -7,3 +7,5 @@ jobs:
with:
go-version: ""
go-version-file: ./go.mod
+ secrets:
+ gh_pat: "${{ secrets.PERSONAL_ACCESS_TOKEN }}"
Christian Rocha created
Configuration tests and new logging
.github/workflows/build.yml | 2
.gitignore | 2
.golangci.yml | 2
cmd/logs.go | 99
cmd/root.go | 61
cmd/schema/README.md | 182
cmd/schema/main.go | 155
crush-schema.json | 700
crush.json | 8
cspell.json | 2
go.mod | 38
go.sum | 53
internal/app/app.go | 27
internal/app/lsp.go | 37
internal/config/config.go | 1513
internal/config/config_test.go | 2075
internal/config/fs.go | 71
internal/config/init.go | 44
internal/config/load.go | 539
internal/config/load_test.go | 1150
internal/config/merge.go | 16
internal/config/merge_test.go | 27
internal/config/provider.go | 87
internal/config/provider_mock.go | 293
internal/config/provider_test.go | 112
internal/config/resolve.go | 90
internal/config/resolve_test.go | 177
internal/config/shell.go | 73
internal/config/validation_test.go | 462
internal/db/connect.go | 15
internal/diff/diff.go | 5
internal/env/env.go | 58
internal/env/env_test.go | 142
internal/fsext/fileutil.go | 7
internal/fur/provider/provider.go | 2
internal/llm/agent/agent.go | 138
internal/llm/agent/mcp-tools.go | 35
internal/llm/prompt/coder.go | 16
internal/llm/prompt/prompt.go | 15
internal/llm/prompt/prompt_test.go | 56
internal/llm/prompt/summarizer.go | 6
internal/llm/prompt/task.go | 4
internal/llm/prompt/title.go | 6
internal/llm/provider/anthropic.go | 31
internal/llm/provider/bedrock.go | 9
internal/llm/provider/gemini.go | 29
internal/llm/provider/openai.go | 27
internal/llm/provider/provider.go | 22
internal/llm/provider/vertexai.go | 4
internal/llm/tools/bash.go | 9
internal/llm/tools/edit.go | 33
internal/llm/tools/fetch.go | 7
internal/llm/tools/glob.go | 17
internal/llm/tools/grep.go | 13
internal/llm/tools/ls.go | 15
internal/llm/tools/view.go | 9
internal/llm/tools/write.go | 18
internal/log/log.go | 61
internal/logging/logger.go | 209
internal/logging/message.go | 21
internal/logging/writer.go | 102
internal/lsp/client.go | 43
internal/lsp/handlers.go | 15
internal/lsp/protocol/tsprotocol.go | 2
internal/lsp/transport.go | 32
internal/lsp/watcher/watcher.go | 83
internal/permission/permission.go | 7
internal/shell/persistent.go | 7
internal/tui/components/anim/anim.go | 5
internal/tui/components/anim/example/main.go | 6
internal/tui/components/chat/chat.go | 10
internal/tui/components/chat/editor/editor.go | 20
internal/tui/components/chat/header/header.go | 11
internal/tui/components/chat/messages/messages.go | 23
internal/tui/components/chat/messages/renderer.go | 4
internal/tui/components/chat/messages/tool.go | 8
internal/tui/components/chat/sidebar/sidebar.go | 24
internal/tui/components/chat/splash/splash.go | 4
internal/tui/components/completions/completions.go | 9
internal/tui/components/completions/item.go | 4
internal/tui/components/core/helpers.go | 2
internal/tui/components/core/layout/container.go | 14
internal/tui/components/core/layout/split.go | 49
internal/tui/components/core/list/list.go | 22
internal/tui/components/core/status/status.go | 38
internal/tui/components/dialogs/commands/arguments.go | 20
internal/tui/components/dialogs/commands/commands.go | 22
internal/tui/components/dialogs/commands/item.go | 4
internal/tui/components/dialogs/compact/compact.go | 4
internal/tui/components/dialogs/dialogs.go | 9
internal/tui/components/dialogs/filepicker/filepicker.go | 14
internal/tui/components/dialogs/init/init.go | 4
internal/tui/components/dialogs/models/models.go | 72
internal/tui/components/dialogs/permissions/permissions.go | 4
internal/tui/components/dialogs/quit/quit.go | 6
internal/tui/components/dialogs/sessions/sessions.go | 20
internal/tui/components/logs/details.go | 176
internal/tui/components/logs/table.go | 197
internal/tui/exp/diffview/Taskfile.yaml | 0
internal/tui/exp/diffview/chroma.go | 0
internal/tui/exp/diffview/diffview.go | 0
internal/tui/exp/diffview/diffview_test.go | 2
internal/tui/exp/diffview/split.go | 0
internal/tui/exp/diffview/style.go | 0
internal/tui/exp/diffview/testdata/TestDefault.after | 0
internal/tui/exp/diffview/testdata/TestDefault.before | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/CustomContextLines/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/CustomContextLines/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/Default/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/Default/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/LargeWidth/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/LargeWidth/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/MultipleHunks/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/MultipleHunks/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/Narrow/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/Narrow/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/NoLineNumbers/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/NoLineNumbers/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/NoSyntaxHighlight/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/NoSyntaxHighlight/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/SmallWidth/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Split/SmallWidth/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/CustomContextLines/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/CustomContextLines/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/Default/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/Default/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/LargeWidth/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/LargeWidth/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/MultipleHunks/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/MultipleHunks/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/Narrow/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/Narrow/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/NoLineNumbers/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/NoLineNumbers/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/NoSyntaxHighlight/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/NoSyntaxHighlight/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/SmallWidth/DarkMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffView/Unified/SmallWidth/LightMode.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf001.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf002.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf003.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf004.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf005.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf006.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf007.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf008.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf009.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf010.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf011.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf012.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf013.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf014.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf015.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf016.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf017.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf018.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf019.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Split/HeightOf020.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf001.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf002.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf003.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf004.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf005.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf006.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf007.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf008.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf009.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf010.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf011.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf012.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf013.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf014.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf015.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf016.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf017.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf018.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf019.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewHeight/Unified/HeightOf020.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewTabs/Split.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewTabs/Unified.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf001.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf002.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf003.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf004.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf005.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf006.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf007.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf008.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf009.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf010.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf011.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf012.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf013.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf014.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf015.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf016.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf017.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf018.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf019.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf020.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf021.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf022.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf023.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf024.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf025.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf026.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf027.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf028.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf029.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf030.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf031.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf032.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf033.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf034.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf035.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf036.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf037.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf038.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf039.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf040.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf041.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf042.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf043.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf044.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf045.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf046.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf047.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf048.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf049.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf050.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf051.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf052.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf053.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf054.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf055.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf056.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf057.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf058.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf059.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf060.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf061.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf062.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf063.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf064.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf065.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf066.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf067.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf068.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf069.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf070.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf071.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf072.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf073.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf074.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf075.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf076.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf077.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf078.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf079.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf080.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf081.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf082.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf083.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf084.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf085.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf086.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf087.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf088.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf089.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf090.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf091.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf092.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf093.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf094.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf095.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf096.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf097.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf098.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf099.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf100.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf101.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf102.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf103.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf104.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf105.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf106.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf107.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf108.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf109.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Split/WidthOf110.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf001.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf002.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf003.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf004.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf005.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf006.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf007.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf008.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf009.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf010.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf011.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf012.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf013.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf014.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf015.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf016.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf017.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf018.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf019.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf020.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf021.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf022.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf023.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf024.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf025.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf026.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf027.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf028.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf029.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf030.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf031.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf032.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf033.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf034.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf035.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf036.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf037.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf038.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf039.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf040.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf041.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf042.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf043.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf044.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf045.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf046.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf047.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf048.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf049.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf050.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf051.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf052.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf053.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf054.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf055.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf056.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf057.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf058.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf059.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewWidth/Unified/WidthOf060.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf00.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf01.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf02.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf03.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf04.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf05.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf06.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf07.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf08.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf09.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf10.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf11.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf12.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf13.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf14.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf15.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf16.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf17.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf18.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf19.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Split/XOffsetOf20.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf00.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf01.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf02.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf03.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf04.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf05.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf06.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf07.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf08.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf09.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf10.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf11.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf12.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf13.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf14.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf15.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf16.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf17.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf18.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf19.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewXOffset/Unified/XOffsetOf20.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf00.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf01.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf02.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf03.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf04.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf05.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf06.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf07.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf08.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf09.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf10.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf11.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf12.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf13.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf14.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf15.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Split/YOffsetOf16.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf00.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf01.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf02.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf03.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf04.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf05.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf06.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf07.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf08.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf09.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf10.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf11.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf12.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf13.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf14.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf15.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffset/Unified/YOffsetOf16.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf00.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf01.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf02.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf03.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf04.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf05.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf06.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf07.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf08.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf09.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf10.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf11.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf12.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf13.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf14.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf15.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Split/YOffsetOf16.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf00.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf01.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf02.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf03.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf04.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf05.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf06.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf07.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf08.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf09.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf10.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf11.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf12.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf13.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf14.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf15.golden | 0
internal/tui/exp/diffview/testdata/TestDiffViewYOffsetInfinite/Unified/YOffsetOf16.golden | 0
internal/tui/exp/diffview/testdata/TestMultipleHunks.after | 0
internal/tui/exp/diffview/testdata/TestMultipleHunks.before | 0
internal/tui/exp/diffview/testdata/TestNarrow.after | 0
internal/tui/exp/diffview/testdata/TestNarrow.before | 0
internal/tui/exp/diffview/testdata/TestTabs.after | 0
internal/tui/exp/diffview/testdata/TestTabs.before | 0
internal/tui/exp/diffview/testdata/TestUdiff/ToUnifiedDiff/DefaultContextLines/Content.golden | 0
internal/tui/exp/diffview/testdata/TestUdiff/ToUnifiedDiff/DefaultContextLines/JSON.golden | 0
internal/tui/exp/diffview/testdata/TestUdiff/ToUnifiedDiff/DefaultContextLinesPlusOne/Content.golden | 0
internal/tui/exp/diffview/testdata/TestUdiff/ToUnifiedDiff/DefaultContextLinesPlusOne/JSON.golden | 0
internal/tui/exp/diffview/testdata/TestUdiff/ToUnifiedDiff/DefaultContextLinesPlusTwo/Content.golden | 0
internal/tui/exp/diffview/testdata/TestUdiff/ToUnifiedDiff/DefaultContextLinesPlusTwo/JSON.golden | 0
internal/tui/exp/diffview/testdata/TestUdiff/Unified.golden | 0
internal/tui/exp/diffview/udiff_test.go | 0
internal/tui/exp/diffview/util.go | 0
internal/tui/exp/diffview/util_test.go | 0
internal/tui/exp/list/list.go | 89
internal/tui/highlight/highlight.go | 0
internal/tui/keys.go | 6
internal/tui/page/chat/chat.go | 24
internal/tui/page/logs/keys.go | 43
internal/tui/page/logs/logs.go | 100
internal/tui/styles/theme.go | 2
internal/tui/tui.go | 56
internal/tui/util/util.go | 6
main.go | 14
todos.md | 51
vendor/cloud.google.com/go/LICENSE | 202
vendor/cloud.google.com/go/auth/CHANGES.md | 368
vendor/cloud.google.com/go/auth/LICENSE | 202
vendor/cloud.google.com/go/auth/README.md | 40
vendor/cloud.google.com/go/auth/auth.go | 618
vendor/cloud.google.com/go/auth/credentials/compute.go | 90
vendor/cloud.google.com/go/auth/credentials/detect.go | 279
vendor/cloud.google.com/go/auth/credentials/doc.go | 45
vendor/cloud.google.com/go/auth/credentials/filetypes.go | 231
vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/aws_provider.go | 531
vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/executable_provider.go | 284
vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/externalaccount.go | 428
vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/file_provider.go | 78
vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/info.go | 74
vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/programmatic_provider.go | 30
vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/url_provider.go | 93
vendor/cloud.google.com/go/auth/credentials/internal/externalaccount/x509_provider.go | 63
vendor/cloud.google.com/go/auth/credentials/internal/externalaccountuser/externalaccountuser.go | 115
vendor/cloud.google.com/go/auth/credentials/internal/gdch/gdch.go | 191
vendor/cloud.google.com/go/auth/credentials/internal/impersonate/impersonate.go | 156
vendor/cloud.google.com/go/auth/credentials/internal/stsexchange/sts_exchange.go | 167
vendor/cloud.google.com/go/auth/credentials/selfsignedjwt.go | 89
vendor/cloud.google.com/go/auth/httptransport/httptransport.go | 247
vendor/cloud.google.com/go/auth/httptransport/transport.go | 234
vendor/cloud.google.com/go/auth/internal/credsfile/credsfile.go | 107
vendor/cloud.google.com/go/auth/internal/credsfile/filetype.go | 157
vendor/cloud.google.com/go/auth/internal/credsfile/parse.go | 98
vendor/cloud.google.com/go/auth/internal/internal.go | 219
vendor/cloud.google.com/go/auth/internal/jwt/jwt.go | 171
vendor/cloud.google.com/go/auth/internal/transport/cba.go | 368
vendor/cloud.google.com/go/auth/internal/transport/cert/default_cert.go | 65
vendor/cloud.google.com/go/auth/internal/transport/cert/enterprise_cert.go | 54
vendor/cloud.google.com/go/auth/internal/transport/cert/secureconnect_cert.go | 124
vendor/cloud.google.com/go/auth/internal/transport/cert/workload_cert.go | 114
vendor/cloud.google.com/go/auth/internal/transport/s2a.go | 138
vendor/cloud.google.com/go/auth/internal/transport/transport.go | 106
vendor/cloud.google.com/go/auth/threelegged.go | 382
vendor/cloud.google.com/go/civil/civil.go | 350
vendor/cloud.google.com/go/compute/metadata/CHANGES.md | 66
vendor/cloud.google.com/go/compute/metadata/LICENSE | 202
vendor/cloud.google.com/go/compute/metadata/README.md | 27
vendor/cloud.google.com/go/compute/metadata/log.go | 149
vendor/cloud.google.com/go/compute/metadata/metadata.go | 872
vendor/cloud.google.com/go/compute/metadata/retry.go | 114
vendor/cloud.google.com/go/compute/metadata/retry_linux.go | 31
vendor/cloud.google.com/go/compute/metadata/syscheck.go | 26
vendor/cloud.google.com/go/compute/metadata/syscheck_linux.go | 28
vendor/cloud.google.com/go/compute/metadata/syscheck_windows.go | 38
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/CHANGELOG.md | 849
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/LICENSE.txt | 21
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/README.md | 39
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/internal/resource/resource_identifier.go | 239
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/internal/resource/resource_type.go | 114
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/policy/policy.go | 108
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/runtime/pipeline.go | 70
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/runtime/policy_bearer_token.go | 102
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/runtime/policy_register_rp.go | 322
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/runtime/policy_trace_namespace.go | 30
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/runtime/runtime.go | 24
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/ci.yml | 29
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud/cloud.go | 44
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud/doc.go | 53
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/core.go | 173
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/doc.go | 264
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/errors.go | 17
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/etag.go | 57
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported/exported.go | 175
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported/pipeline.go | 77
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported/request.go | 260
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported/response_error.go | 201
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log/log.go | 50
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/async/async.go | 159
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/body/body.go | 135
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/fake/fake.go | 133
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/loc/loc.go | 123
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/op/op.go | 148
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/poller.go | 24
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/util.go | 212
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/constants.go | 44
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared/shared.go | 149
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/log/doc.go | 10
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/log/log.go | 55
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/policy/doc.go | 10
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/policy/policy.go | 198
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/doc.go | 10
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/errors.go | 27
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/pager.go | 138
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/pipeline.go | 94
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_api_version.go | 75
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_bearer_token.go | 236
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_body_download.go | 72
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_http_header.go | 40
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_http_trace.go | 154
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_include_response.go | 35
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_key_credential.go | 64
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_logging.go | 264
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_request_id.go | 34
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_retry.go | 276
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_sas_credential.go | 55
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/policy_telemetry.go | 83
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/poller.go | 396
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/request.go | 281
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/response.go | 109
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_dialer_other.go | 15
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_dialer_wasm.go | 15
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime/transport_default_http_client.go | 48
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/streaming/doc.go | 9
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/streaming/progress.go | 89
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/tracing/constants.go | 41
vendor/github.com/Azure/azure-sdk-for-go/sdk/azcore/tracing/tracing.go | 191
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/.gitignore | 4
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/CHANGELOG.md | 575
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/LICENSE.txt | 21
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/MIGRATION.md | 307
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/README.md | 258
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TOKEN_CACHING.MD | 71
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/TROUBLESHOOTING.md | 241
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/assets.json | 6
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/authentication_record.go | 95
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/azidentity.go | 190
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/azure_cli_credential.go | 190
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/azure_developer_cli_credential.go | 169
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/azure_pipelines_credential.go | 140
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/chained_token_credential.go | 138
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/ci.yml | 46
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/client_assertion_credential.go | 85
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/client_certificate_credential.go | 174
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/client_secret_credential.go | 75
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/confidential_client.go | 184
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/default_azure_credential.go | 165
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/developer_credential_util.go | 38
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/device_code_credential.go | 138
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/environment_credential.go | 167
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/errors.go | 170
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/go.work.sum | 60
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/interactive_browser_credential.go | 118
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/internal/exported.go | 18
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/internal/internal.go | 31
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/logging.go | 14
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/managed-identity-matrix.json | 17
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/managed_identity_client.go | 501
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/managed_identity_credential.go | 128
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/on_behalf_of_credential.go | 113
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/public_client.go | 273
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/test-resources-post.ps1 | 112
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/test-resources-pre.ps1 | 44
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/test-resources.bicep | 219
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/username_password_credential.go | 90
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/version.go | 18
vendor/github.com/Azure/azure-sdk-for-go/sdk/azidentity/workload_identity.go | 131
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/LICENSE.txt | 21
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/diag/diag.go | 51
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/diag/doc.go | 7
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/errorinfo/doc.go | 7
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/errorinfo/errorinfo.go | 46
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/exported/exported.go | 129
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/log/doc.go | 7
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/log/log.go | 104
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/poller/util.go | 155
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/temporal/resource.go | 123
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/uuid/doc.go | 7
vendor/github.com/Azure/azure-sdk-for-go/sdk/internal/uuid/uuid.go | 76
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/LICENSE | 21
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/cache/cache.go | 54
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential/confidential.go | 719
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/errors/error_design.md | 111
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/errors/errors.go | 89
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/base.go | 477
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/items.go | 213
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/partitioned_storage.go | 442
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage/storage.go | 583
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/exported/exported.go | 34
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/design.md | 140
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/json.go | 184
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/mapslice.go | 333
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/marshal.go | 346
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/struct.go | 290
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/types/time/time.go | 70
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/local/server.go | 177
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/oauth.go | 354
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/accesstokens.go | 457
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/apptype_string.go | 25
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens/tokens.go | 339
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority/authority.go | 589
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority/authorizetype_string.go | 30
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/internal/comm/comm.go | 320
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/internal/comm/compress.go | 33
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/internal/grant/grant.go | 17
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/ops.go | 56
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust/defs/endpointtype_string.go | 25
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust/defs/mex_document_definitions.go | 394
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust/defs/saml_assertion_definitions.go | 230
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust/defs/version_string.go | 25
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust/defs/wstrust_endpoint.go | 199
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust/defs/wstrust_mex_document.go | 159
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust/wstrust.go | 136
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/resolvers.go | 149
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/options/options.go | 52
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/shared/shared.go | 72
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/version/version.go | 8
vendor/github.com/AzureAD/microsoft-authentication-library-for-go/apps/public/public.go | 756
vendor/github.com/JohannesKaufmann/html-to-markdown/.gitignore | 14
vendor/github.com/JohannesKaufmann/html-to-markdown/CONTRIBUTING.md | 0
vendor/github.com/JohannesKaufmann/html-to-markdown/LICENSE | 21
vendor/github.com/JohannesKaufmann/html-to-markdown/README.md | 242
vendor/github.com/JohannesKaufmann/html-to-markdown/SECURITY.md | 6
vendor/github.com/JohannesKaufmann/html-to-markdown/commonmark.go | 393
vendor/github.com/JohannesKaufmann/html-to-markdown/escape/escape.go | 65
vendor/github.com/JohannesKaufmann/html-to-markdown/from.go | 464
vendor/github.com/JohannesKaufmann/html-to-markdown/logo_five_years.png | 0
vendor/github.com/JohannesKaufmann/html-to-markdown/markdown.go | 212
vendor/github.com/JohannesKaufmann/html-to-markdown/utils.go | 533
vendor/github.com/MakeNowJust/heredoc/LICENSE | 21
vendor/github.com/MakeNowJust/heredoc/README.md | 52
vendor/github.com/MakeNowJust/heredoc/heredoc.go | 105
vendor/github.com/PuerkitoBio/goquery/.gitattributes | 1
vendor/github.com/PuerkitoBio/goquery/.gitignore | 16
vendor/github.com/PuerkitoBio/goquery/LICENSE | 12
vendor/github.com/PuerkitoBio/goquery/README.md | 202
vendor/github.com/PuerkitoBio/goquery/array.go | 124
vendor/github.com/PuerkitoBio/goquery/doc.go | 123
vendor/github.com/PuerkitoBio/goquery/expand.go | 70
vendor/github.com/PuerkitoBio/goquery/filter.go | 163
vendor/github.com/PuerkitoBio/goquery/iteration.go | 47
vendor/github.com/PuerkitoBio/goquery/manipulation.go | 679
vendor/github.com/PuerkitoBio/goquery/property.go | 275
vendor/github.com/PuerkitoBio/goquery/query.go | 49
vendor/github.com/PuerkitoBio/goquery/traversal.go | 704
vendor/github.com/PuerkitoBio/goquery/type.go | 203
vendor/github.com/PuerkitoBio/goquery/utilities.go | 178
vendor/github.com/alecthomas/chroma/v2/.editorconfig | 17
vendor/github.com/alecthomas/chroma/v2/.gitignore | 25
vendor/github.com/alecthomas/chroma/v2/.golangci.yml | 95
vendor/github.com/alecthomas/chroma/v2/.goreleaser.yml | 37
vendor/github.com/alecthomas/chroma/v2/Bitfile | 24
vendor/github.com/alecthomas/chroma/v2/COPYING | 19
vendor/github.com/alecthomas/chroma/v2/Makefile | 23
vendor/github.com/alecthomas/chroma/v2/README.md | 297
vendor/github.com/alecthomas/chroma/v2/coalesce.go | 35
vendor/github.com/alecthomas/chroma/v2/colour.go | 192
vendor/github.com/alecthomas/chroma/v2/delegate.go | 152
vendor/github.com/alecthomas/chroma/v2/doc.go | 7
vendor/github.com/alecthomas/chroma/v2/emitters.go | 218
vendor/github.com/alecthomas/chroma/v2/formatter.go | 43
vendor/github.com/alecthomas/chroma/v2/formatters/api.go | 57
vendor/github.com/alecthomas/chroma/v2/formatters/html/html.go | 623
vendor/github.com/alecthomas/chroma/v2/formatters/json.go | 39
vendor/github.com/alecthomas/chroma/v2/formatters/svg/font_liberation_mono.go | 50
vendor/github.com/alecthomas/chroma/v2/formatters/svg/svg.go | 222
vendor/github.com/alecthomas/chroma/v2/formatters/tokens.go | 18
vendor/github.com/alecthomas/chroma/v2/formatters/tty_indexed.go | 284
vendor/github.com/alecthomas/chroma/v2/formatters/tty_truecolour.go | 76
vendor/github.com/alecthomas/chroma/v2/iterator.go | 76
vendor/github.com/alecthomas/chroma/v2/lexer.go | 162
vendor/github.com/alecthomas/chroma/v2/lexers/README.md | 46
vendor/github.com/alecthomas/chroma/v2/lexers/caddyfile.go | 275
vendor/github.com/alecthomas/chroma/v2/lexers/cl.go | 243
vendor/github.com/alecthomas/chroma/v2/lexers/dns.go | 17
vendor/github.com/alecthomas/chroma/v2/lexers/emacs.go | 533
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abap.xml | 102
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/abnf.xml | 66
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript.xml | 41
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/actionscript_3.xml | 163
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ada.xml | 321
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/agda.xml | 56
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/al.xml | 39
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/alloy.xml | 58
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/angular2.xml | 108
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/antlr.xml | 317
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apacheconf.xml | 74
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/apl.xml | 59
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/applescript.xml | 47
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arangodb_aql.xml | 154
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/arduino.xml | 187
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/armasm.xml | 126
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/atl.xml | 165
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autohotkey.xml | 42
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/autoit.xml | 33
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/awk.xml | 95
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ballerina.xml | 97
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash.xml | 220
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bash_session.xml | 25
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/batchfile.xml | 660
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/beef.xml | 120
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bibtex.xml | 152
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bicep.xml | 84
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/blitzbasic.xml | 141
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bnf.xml | 28
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/bqn.xml | 83
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/brainfuck.xml | 51
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c#.xml | 121
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c++.xml | 331
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/c.xml | 260
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cap_n_proto.xml | 122
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cassandra_cql.xml | 137
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ceylon.xml | 151
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfengine3.xml | 197
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cfstatement.xml | 92
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chaiscript.xml | 134
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/chapel.xml | 143
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cheetah.xml | 55
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/clojure.xml | 50
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cmake.xml | 90
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cobol.xml | 63
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coffeescript.xml | 210
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/common_lisp.xml | 184
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/coq.xml | 136
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/crystal.xml | 762
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/css.xml | 65
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/csv.xml | 53
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cue.xml | 85
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/cython.xml | 372
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/d.xml | 133
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dart.xml | 213
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dax.xml | 12
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/desktop_entry.xml | 17
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/diff.xml | 52
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/django_jinja.xml | 153
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dns.xml | 17
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/docker.xml | 57
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dtd.xml | 168
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/dylan.xml | 176
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ebnf.xml | 90
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elixir.xml | 744
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/elm.xml | 119
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/emacslisp.xml | 132
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/erlang.xml | 21
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/factor.xml | 303
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fennel.xml | 47
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fish.xml | 159
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/forth.xml | 35
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortran.xml | 30
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fortranfixed.xml | 71
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/fsharp.xml | 245
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gas.xml | 150
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript.xml | 136
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gdscript3.xml | 22
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gherkin.xml | 18
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gleam.xml | 117
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/glsl.xml | 65
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/gnuplot.xml | 219
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/go_template.xml | 114
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/graphql.xml | 88
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groff.xml | 90
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/groovy.xml | 135
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/handlebars.xml | 147
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hare.xml | 98
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/haskell.xml | 275
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hcl.xml | 143
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hexdump.xml | 189
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlb.xml | 149
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hlsl.xml | 68
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/holyc.xml | 252
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/html.xml | 159
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/hy.xml | 104
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/idris.xml | 216
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/igor.xml | 26
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ini.xml | 45
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/io.xml | 71
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/iscdhcpd.xml | 96
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/j.xml | 157
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/java.xml | 193
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/javascript.xml | 160
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/json.xml | 112
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jsonata.xml | 83
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jsonnet.xml | 138
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/julia.xml | 198
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/jungle.xml | 98
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/kotlin.xml | 223
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lighttpd_configuration_file.xml | 42
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/llvm.xml | 61
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/lua.xml | 158
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/makefile.xml | 131
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mako.xml | 120
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mason.xml | 89
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/materialize_sql_dialect.xml | 47
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mathematica.xml | 60
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/matlab.xml | 65
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mcfunction.xml | 138
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/meson.xml | 85
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/metal.xml | 270
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/minizinc.xml | 82
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mlir.xml | 73
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/modula-2.xml | 245
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/monkeyc.xml | 153
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/morrowindscript.xml | 63
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/myghty.xml | 77
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/mysql.xml | 77
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nasm.xml | 126
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/natural.xml | 102
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ndisasm.xml | 123
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/newspeak.xml | 121
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nginx_configuration_file.xml | 98
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nim.xml | 211
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nix.xml | 258
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/nsis.xml | 33
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objective-c.xml | 510
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/objectpascal.xml | 145
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ocaml.xml | 145
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/octave.xml | 19
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/odin.xml | 113
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/onesenterprise.xml | 92
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openedge_abl.xml | 39
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/openscad.xml | 96
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/org_mode.xml | 329
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pacmanconf.xml | 37
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/perl.xml | 84
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/php.xml | 212
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pig.xml | 105
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pkgconfig.xml | 73
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pl_pgsql.xml | 35
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plaintext.xml | 21
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/plutus_core.xml | 105
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/pony.xml | 135
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postgresql_sql_dialect.xml | 47
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/postscript.xml | 89
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/povray.xml | 22
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powerquery.xml | 51
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/powershell.xml | 230
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prolog.xml | 115
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promela.xml | 119
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/promql.xml | 123
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/properties.xml | 45
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/protocol_buffer.xml | 118
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/prql.xml | 161
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/psl.xml | 213
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/puppet.xml | 94
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python.xml | 260
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/python_2.xml | 356
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qbasic.xml | 173
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/qml.xml | 113
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/r.xml | 128
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/racket.xml | 213
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ragel.xml | 149
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/react.xml | 236
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reasonml.xml | 147
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/reg.xml | 68
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rego.xml | 94
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rexx.xml | 127
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rpm_spec.xml | 58
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ruby.xml | 724
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/rust.xml | 375
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sas.xml | 129
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sass.xml | 123
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scala.xml | 163
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scheme.xml | 59
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scilab.xml | 21
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/scss.xml | 53
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sed.xml | 28
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sieve.xml | 61
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smali.xml | 73
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smalltalk.xml | 294
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/smarty.xml | 79
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/snbt.xml | 58
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/snobol.xml | 95
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/solidity.xml | 125
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sourcepawn.xml | 59
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sparql.xml | 160
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/sql.xml | 29
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/squidconf.xml | 20
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/standard_ml.xml | 548
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stas.xml | 85
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/stylus.xml | 16
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/swift.xml | 106
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemd.xml | 63
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/systemverilog.xml | 129
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tablegen.xml | 69
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tal.xml | 43
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tasm.xml | 135
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcl.xml | 272
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tcsh.xml | 121
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/termcap.xml | 75
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terminfo.xml | 84
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/terraform.xml | 140
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tex.xml | 113
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/thrift.xml | 154
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/toml.xml | 44
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/tradingview.xml | 42
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/transact-sql.xml | 38
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turing.xml | 82
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/turtle.xml | 170
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/twig.xml | 155
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typescript.xml | 295
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscript.xml | 178
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscriptcssdata.xml | 52
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typoscripthtmldata.xml | 52
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/typst.xml | 108
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/ucode.xml | 147
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v.xml | 100
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/v_shell.xml | 144
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vala.xml | 72
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vb_net.xml | 162
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/verilog.xml | 158
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhdl.xml | 171
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vhs.xml | 48
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/viml.xml | 85
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/vue.xml | 307
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/wdte.xml | 43
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/webgpu_shading_language.xml | 32
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/webvtt.xml | 283
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/whiley.xml | 57
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xml.xml | 95
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/xorg.xml | 35
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yaml.xml | 122
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/yang.xml | 99
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/z80_assembly.xml | 74
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zed.xml | 51
vendor/github.com/alecthomas/chroma/v2/lexers/embedded/zig.xml | 112
vendor/github.com/alecthomas/chroma/v2/lexers/genshi.go | 118
vendor/github.com/alecthomas/chroma/v2/lexers/go.go | 81
vendor/github.com/alecthomas/chroma/v2/lexers/haxe.go | 647
vendor/github.com/alecthomas/chroma/v2/lexers/html.go | 8
vendor/github.com/alecthomas/chroma/v2/lexers/http.go | 131
1,000 files changed, 77,449 insertions(+), 7,166 deletions(-)
@@ -7,3 +7,5 @@ jobs:
with:
go-version: ""
go-version-file: ./go.mod
+ secrets:
+ gh_pat: "${{ secrets.PERSONAL_ACCESS_TOKEN }}"
@@ -43,7 +43,7 @@ Thumbs.db
**/.crush/**
-crush
+/crush
manpages/
completions/
@@ -6,7 +6,7 @@ linters:
# - goconst
# - godot
# - godox
- - gomoddirectives
+ # - gomoddirectives
- goprintffuncname
# - gosec
- misspell
@@ -0,0 +1,99 @@
+package cmd
+
+import (
+ "encoding/json"
+ "fmt"
+ "os"
+ "path/filepath"
+ "slices"
+ "time"
+
+ "github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/log/v2"
+ "github.com/nxadm/tail"
+ "github.com/spf13/cobra"
+)
+
+func init() {
+ rootCmd.AddCommand(logsCmd)
+}
+
+var logsCmd = &cobra.Command{
+ Use: "logs",
+ Short: "View crush logs",
+ Long: `View the logs generated by Crush. This command allows you to see the log output for debugging and monitoring purposes.`,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ cwd, err := cmd.Flags().GetString("cwd")
+ if err != nil {
+ return fmt.Errorf("failed to get current working directory: %v", err)
+ }
+ log.SetLevel(log.DebugLevel)
+ cfg, err := config.Load(cwd, false)
+ if err != nil {
+ return fmt.Errorf("failed to load configuration: %v", err)
+ }
+ logsFile := filepath.Join(cfg.WorkingDir(), cfg.Options.DataDirectory, "logs", "crush.log")
+ _, err = os.Stat(logsFile)
+ if os.IsNotExist(err) {
+ log.Warn("Looks like you are not in a crush project. No logs found.")
+ return nil
+ }
+ t, err := tail.TailFile(logsFile, tail.Config{Follow: true, ReOpen: true, Logger: tail.DiscardingLogger})
+ if err != nil {
+ return fmt.Errorf("failed to tail log file: %v", err)
+ }
+
+ // Print the text of each received line
+ for line := range t.Lines {
+ var data map[string]any
+ if err := json.Unmarshal([]byte(line.Text), &data); err != nil {
+ continue
+ }
+ msg := data["msg"]
+ level := data["level"]
+ otherData := []any{}
+ keys := []string{}
+ for k := range data {
+ keys = append(keys, k)
+ }
+ slices.Sort(keys)
+ for _, k := range keys {
+ switch k {
+ case "msg", "level", "time":
+ continue
+ case "source":
+ source, ok := data[k].(map[string]any)
+ if !ok {
+ continue
+ }
+ sourceFile := fmt.Sprintf("%s:%d", source["file"], int(source["line"].(float64)))
+ otherData = append(otherData, "source", sourceFile)
+
+ default:
+ otherData = append(otherData, k, data[k])
+ }
+ }
+ log.SetTimeFunction(func(_ time.Time) time.Time {
+ // parse the timestamp from the log line if available
+ t, err := time.Parse(time.RFC3339, data["time"].(string))
+ if err != nil {
+ return time.Now() // fallback to current time if parsing fails
+ }
+ return t
+ })
+ switch level {
+ case "INFO":
+ log.Info(msg, otherData...)
+ case "DEBUG":
+ log.Debug(msg, otherData...)
+ case "ERROR":
+ log.Error(msg, otherData...)
+ case "WARN":
+ log.Warn(msg, otherData...)
+ default:
+ log.Info(msg, otherData...)
+ }
+ }
+ return nil
+ },
+}
@@ -4,6 +4,7 @@ import (
"context"
"fmt"
"io"
+ "log/slog"
"os"
"sync"
"time"
@@ -14,7 +15,7 @@ import (
"github.com/charmbracelet/crush/internal/db"
"github.com/charmbracelet/crush/internal/format"
"github.com/charmbracelet/crush/internal/llm/agent"
- "github.com/charmbracelet/crush/internal/logging"
+ "github.com/charmbracelet/crush/internal/log"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/tui"
"github.com/charmbracelet/crush/internal/version"
@@ -36,7 +37,7 @@ to assist developers in writing, debugging, and understanding code directly from
# Run with debug logging
crush -d
- # Run with debug logging in a specific directory
 + # Run with debug logging in a specific directory
crush -d -c /path/to/project
# Print version
@@ -75,7 +76,7 @@ to assist developers in writing, debugging, and understanding code directly from
cwd = c
}
- _, err := config.Init(cwd, debug)
+ cfg, err := config.Init(cwd, debug)
if err != nil {
return err
}
@@ -85,25 +86,25 @@ to assist developers in writing, debugging, and understanding code directly from
defer cancel()
// Connect DB, this will also run migrations
- conn, err := db.Connect(ctx)
+ conn, err := db.Connect(ctx, cfg.Options.DataDirectory)
if err != nil {
return err
}
- app, err := app.New(ctx, conn)
+ app, err := app.New(ctx, conn, cfg)
if err != nil {
- logging.Error("Failed to create app: %v", err)
+ slog.Error(fmt.Sprintf("Failed to create app instance: %v", err))
return err
}
// Defer shutdown here so it runs for both interactive and non-interactive modes
defer app.Shutdown()
// Initialize MCP tools early for both modes
- initMCPTools(ctx, app)
+ initMCPTools(ctx, app, cfg)
prompt, err = maybePrependStdin(prompt)
if err != nil {
- logging.Error("Failed to read stdin: %v", err)
+ slog.Error(fmt.Sprintf("Failed to read from stdin: %v", err))
return err
}
@@ -132,18 +133,18 @@ to assist developers in writing, debugging, and understanding code directly from
// Set up message handling for the TUI
go func() {
defer tuiWg.Done()
- defer logging.RecoverPanic("TUI-message-handler", func() {
+ defer log.RecoverPanic("TUI-message-handler", func() {
attemptTUIRecovery(program)
})
for {
select {
case <-tuiCtx.Done():
- logging.Info("TUI message handler shutting down")
+ slog.Info("TUI message handler shutting down")
return
case msg, ok := <-ch:
if !ok {
- logging.Info("TUI message channel closed")
+ slog.Info("TUI message channel closed")
return
}
program.Send(msg)
@@ -165,7 +166,7 @@ to assist developers in writing, debugging, and understanding code directly from
// Wait for TUI message handler to finish
tuiWg.Wait()
- logging.Info("All goroutines cleaned up")
+ slog.Info("All goroutines cleaned up")
}
// Run the TUI
@@ -173,35 +174,35 @@ to assist developers in writing, debugging, and understanding code directly from
cleanup()
if err != nil {
- logging.Error("TUI error: %v", err)
+ slog.Error(fmt.Sprintf("TUI run error: %v", err))
return fmt.Errorf("TUI error: %v", err)
}
- logging.Info("TUI exited with result: %v", result)
+ slog.Info(fmt.Sprintf("TUI exited with result: %v", result))
return nil
},
}
// attemptTUIRecovery tries to recover the TUI after a panic
func attemptTUIRecovery(program *tea.Program) {
- logging.Info("Attempting to recover TUI after panic")
+ slog.Info("Attempting to recover TUI after panic")
// We could try to restart the TUI or gracefully exit
// For now, we'll just quit the program to avoid further issues
program.Quit()
}
-func initMCPTools(ctx context.Context, app *app.App) {
+func initMCPTools(ctx context.Context, app *app.App, cfg *config.Config) {
go func() {
- defer logging.RecoverPanic("MCP-goroutine", nil)
+ defer log.RecoverPanic("MCP-goroutine", nil)
// Create a context with timeout for the initial MCP tools fetch
ctxWithTimeout, cancel := context.WithTimeout(ctx, 30*time.Second)
defer cancel()
// Set this up once with proper error handling
- agent.GetMcpTools(ctxWithTimeout, app.Permissions)
- logging.Info("MCP message handling goroutine exiting")
+ agent.GetMcpTools(ctxWithTimeout, app.Permissions, cfg)
+ slog.Info("MCP message handling goroutine exiting")
}()
}
@@ -215,7 +216,7 @@ func setupSubscriber[T any](
wg.Add(1)
go func() {
defer wg.Done()
- defer logging.RecoverPanic(fmt.Sprintf("subscription-%s", name), nil)
+ defer log.RecoverPanic(fmt.Sprintf("subscription-%s", name), nil)
subCh := subscriber(ctx)
@@ -223,7 +224,7 @@ func setupSubscriber[T any](
select {
case event, ok := <-subCh:
if !ok {
- logging.Info("subscription channel closed", "name", name)
+ slog.Info("subscription channel closed", "name", name)
return
}
@@ -232,13 +233,13 @@ func setupSubscriber[T any](
select {
case outputCh <- msg:
case <-time.After(2 * time.Second):
- logging.Warn("message dropped due to slow consumer", "name", name)
+ slog.Warn("message dropped due to slow consumer", "name", name)
case <-ctx.Done():
- logging.Info("subscription cancelled", "name", name)
+ slog.Info("subscription cancelled", "name", name)
return
}
case <-ctx.Done():
- logging.Info("subscription cancelled", "name", name)
+ slog.Info("subscription cancelled", "name", name)
return
}
}
@@ -251,7 +252,6 @@ func setupSubscriptions(app *app.App, parentCtx context.Context) (chan tea.Msg,
wg := sync.WaitGroup{}
ctx, cancel := context.WithCancel(parentCtx) // Inherit from parent context
- setupSubscriber(ctx, &wg, "logging", logging.Subscribe, ch)
setupSubscriber(ctx, &wg, "sessions", app.Sessions.Subscribe, ch)
setupSubscriber(ctx, &wg, "messages", app.Messages.Subscribe, ch)
setupSubscriber(ctx, &wg, "permissions", app.Permissions.Subscribe, ch)
@@ -259,22 +259,22 @@ func setupSubscriptions(app *app.App, parentCtx context.Context) (chan tea.Msg,
setupSubscriber(ctx, &wg, "history", app.History.Subscribe, ch)
cleanupFunc := func() {
- logging.Info("Cancelling all subscriptions")
+ slog.Info("Cancelling all subscriptions")
cancel() // Signal all goroutines to stop
waitCh := make(chan struct{})
go func() {
- defer logging.RecoverPanic("subscription-cleanup", nil)
+ defer log.RecoverPanic("subscription-cleanup", nil)
wg.Wait()
close(waitCh)
}()
select {
case <-waitCh:
- logging.Info("All subscription goroutines completed successfully")
+ slog.Info("All subscription goroutines completed successfully")
close(ch) // Only close after all writers are confirmed done
case <-time.After(5 * time.Second):
- logging.Warn("Timed out waiting for some subscription goroutines to complete")
+ slog.Warn("Timed out waiting for some subscription goroutines to complete")
close(ch)
}
}
@@ -292,9 +292,10 @@ func Execute() {
}
func init() {
+ rootCmd.PersistentFlags().StringP("cwd", "c", "", "Current working directory")
+
rootCmd.Flags().BoolP("help", "h", false, "Help")
rootCmd.Flags().BoolP("debug", "d", false, "Debug")
- rootCmd.Flags().StringP("cwd", "c", "", "Current working directory")
rootCmd.Flags().StringP("prompt", "p", "", "Prompt to run in non-interactive mode")
// Add format flag with validation logic
@@ -1,182 +0,0 @@
-# Crush Configuration Schema Generator
-
-This tool automatically generates a JSON Schema for the Crush configuration file by using Go reflection to analyze the configuration structs. The schema provides validation, autocompletion, and documentation for configuration files.
-
-## Features
-
-- **Automated Generation**: Uses reflection to automatically generate schemas from Go structs
-- **Always Up-to-Date**: Schema stays in sync with code changes automatically
-- **Comprehensive**: Includes all configuration options, types, and validation rules
-- **Enhanced**: Adds provider enums, model lists, and custom descriptions
-- **Extensible**: Easy to add new fields and modify existing ones
-
-## Usage
-
-```bash
-# Generate the schema
-go run cmd/schema/main.go > crush-schema.json
-
-# Or use the task runner
-task schema
-```
-
-## How It Works
-
-The generator:
-
-1. **Reflects on Config Structs**: Analyzes the `config.Config` struct and all related types
-2. **Generates Base Schema**: Creates JSON Schema definitions for all struct fields
-3. **Enhances with Runtime Data**: Adds provider lists, model enums, and tool lists from the actual codebase
-4. **Adds Custom Descriptions**: Provides meaningful descriptions for configuration options
-5. **Sets Default Values**: Includes appropriate defaults for optional fields
-
-## Schema Features
-
-The generated schema includes:
-
-- **Type Safety**: Proper type definitions for all configuration fields
-- **Validation**: Required fields, enum constraints, and format validation
-- **Documentation**: Descriptions for all configuration options
-- **Defaults**: Default values for optional settings
-- **Provider Enums**: Current list of supported providers
-- **Model Enums**: Available models from all configured providers
-- **Tool Lists**: Valid tool names for agent configurations
-- **Cross-References**: Proper relationships between different config sections
-
-## Adding New Configuration Fields
-
-To add new configuration options:
-
-1. **Add to Config Structs**: Add the field to the appropriate struct in `internal/config/`
-2. **Add JSON Tags**: Include proper JSON tags with field names
-3. **Regenerate Schema**: Run the schema generator to update the JSON schema
-4. **Update Validation**: Add any custom validation logic if needed
-
-Example:
-```go
-type Options struct {
- // ... existing fields ...
-
- // New field with JSON tag and description
- NewFeature bool `json:"new_feature,omitempty"`
-}
-```
-
-The schema generator will automatically:
-- Detect the new field
-- Generate appropriate JSON schema
-- Add type information
-- Include in validation
-
-## Using the Schema
-
-### Editor Integration
-
-Most modern editors support JSON Schema:
-
-**VS Code**: Add to your workspace settings:
-```json
-{
- "json.schemas": [
- {
- "fileMatch": ["crush.json", ".crush.json"],
- "url": "./crush-schema.json"
- }
- ]
-}
-```
-
-**JetBrains IDEs**: Configure in Settings → Languages & Frameworks → Schemas and DTDs → JSON Schema Mappings
-
-### Validation Tools
-
-```bash
-# Using jsonschema (Python)
-pip install jsonschema
-jsonschema -i crush.json crush-schema.json
-
-# Using ajv-cli (Node.js)
-npm install -g ajv-cli
-ajv validate -s crush-schema.json -d crush.json
-```
-
-### Configuration Example
-
-```json
-{
- "models": {
- "large": {
- "model_id": "claude-3-5-sonnet-20241022",
- "provider": "anthropic",
- "reasoning_effort": "medium",
- "max_tokens": 8192
- },
- "small": {
- "model_id": "claude-3-5-haiku-20241022",
- "provider": "anthropic"
- }
- },
- "providers": {
- "anthropic": {
- "id": "anthropic",
- "provider_type": "anthropic",
- "api_key": "your-api-key",
- "disabled": false
- }
- },
- "agents": {
- "coder": {
- "id": "coder",
- "name": "Coder",
- "model": "large",
- "disabled": false
- },
- "custom-agent": {
- "id": "custom-agent",
- "name": "Custom Agent",
- "description": "A custom agent for specific tasks",
- "model": "small",
- "allowed_tools": ["glob", "grep", "view"],
- "allowed_mcp": {
- "filesystem": ["read", "write"]
- }
- }
- },
- "mcp": {
- "filesystem": {
- "command": "mcp-filesystem",
- "args": ["--root", "/workspace"],
- "type": "stdio"
- }
- },
- "lsp": {
- "typescript": {
- "command": "typescript-language-server",
- "args": ["--stdio"],
- "enabled": true
- }
- },
- "options": {
- "context_paths": [
- "README.md",
- "docs/",
- ".cursorrules"
- ],
- "data_directory": ".crush",
- "debug": false,
- "tui": {
- "compact_mode": false
- }
- }
-}
-```
-
-## Maintenance
-
-The schema generator is designed to be maintenance-free. As long as:
-
-- Configuration structs have proper JSON tags
-- New enums are added to the enhancement functions
-- The generator is run after significant config changes
-
-The schema will stay current with the codebase automatically.
@@ -1,155 +0,0 @@
-package main
-
-import (
- "encoding/json"
- "fmt"
- "os"
-
- "github.com/charmbracelet/crush/internal/config"
- "github.com/invopop/jsonschema"
-)
-
-func main() {
- // Create a new reflector
- r := &jsonschema.Reflector{
- // Use anonymous schemas to avoid ID conflicts
- Anonymous: true,
- // Expand the root struct instead of referencing it
- ExpandedStruct: true,
- AllowAdditionalProperties: true,
- }
-
- // Generate schema for the main Config struct
- schema := r.Reflect(&config.Config{})
-
- // Enhance the schema with additional information
- enhanceSchema(schema)
-
- // Set the schema metadata
- schema.Version = "https://json-schema.org/draft/2020-12/schema"
- schema.Title = "Crush Configuration"
- schema.Description = "Configuration schema for the Crush application"
-
- // Pretty print the schema
- encoder := json.NewEncoder(os.Stdout)
- encoder.SetIndent("", " ")
- if err := encoder.Encode(schema); err != nil {
- fmt.Fprintf(os.Stderr, "Error encoding schema: %v\n", err)
- os.Exit(1)
- }
-}
-
-// enhanceSchema adds additional enhancements to the generated schema
-func enhanceSchema(schema *jsonschema.Schema) {
- // Add provider enums
- addProviderEnums(schema)
-
- // Add model enums
- addModelEnums(schema)
-
- // Add tool enums
- addToolEnums(schema)
-
- // Add default context paths
- addDefaultContextPaths(schema)
-}
-
-// addProviderEnums adds provider enums to the schema
-func addProviderEnums(schema *jsonschema.Schema) {
- providers := config.Providers()
- var providerIDs []any
- for _, p := range providers {
- providerIDs = append(providerIDs, string(p.ID))
- }
-
- // Add to PreferredModel provider field
- if schema.Definitions != nil {
- if preferredModelDef, exists := schema.Definitions["PreferredModel"]; exists {
- if providerProp, exists := preferredModelDef.Properties.Get("provider"); exists {
- providerProp.Enum = providerIDs
- }
- }
-
- // Add to ProviderConfig ID field
- if providerConfigDef, exists := schema.Definitions["ProviderConfig"]; exists {
- if idProp, exists := providerConfigDef.Properties.Get("id"); exists {
- idProp.Enum = providerIDs
- }
- }
- }
-}
-
-// addModelEnums adds model enums to the schema
-func addModelEnums(schema *jsonschema.Schema) {
- providers := config.Providers()
- var modelIDs []any
- for _, p := range providers {
- for _, m := range p.Models {
- modelIDs = append(modelIDs, m.ID)
- }
- }
-
- // Add to PreferredModel model_id field
- if schema.Definitions != nil {
- if preferredModelDef, exists := schema.Definitions["PreferredModel"]; exists {
- if modelIDProp, exists := preferredModelDef.Properties.Get("model_id"); exists {
- modelIDProp.Enum = modelIDs
- }
- }
- }
-}
-
-// addToolEnums adds tool enums to the schema
-func addToolEnums(schema *jsonschema.Schema) {
- tools := []any{
- "bash", "edit", "fetch", "glob", "grep", "ls", "sourcegraph", "view", "write", "agent",
- }
-
- if schema.Definitions != nil {
- if agentDef, exists := schema.Definitions["Agent"]; exists {
- if allowedToolsProp, exists := agentDef.Properties.Get("allowed_tools"); exists {
- if allowedToolsProp.Items != nil {
- allowedToolsProp.Items.Enum = tools
- }
- }
- }
- }
-}
-
-// addDefaultContextPaths adds default context paths to the schema
-func addDefaultContextPaths(schema *jsonschema.Schema) {
- defaultContextPaths := []any{
- ".github/copilot-instructions.md",
- ".cursorrules",
- ".cursor/rules/",
- "CLAUDE.md",
- "CLAUDE.local.md",
- "GEMINI.md",
- "gemini.md",
- "crush.md",
- "crush.local.md",
- "Crush.md",
- "Crush.local.md",
- "CRUSH.md",
- "CRUSH.local.md",
- }
-
- if schema.Definitions != nil {
- if optionsDef, exists := schema.Definitions["Options"]; exists {
- if contextPathsProp, exists := optionsDef.Properties.Get("context_paths"); exists {
- contextPathsProp.Default = defaultContextPaths
- }
- }
- }
-
- // Also add to root properties if they exist
- if schema.Properties != nil {
- if optionsProp, exists := schema.Properties.Get("options"); exists {
- if optionsProp.Properties != nil {
- if contextPathsProp, exists := optionsProp.Properties.Get("context_paths"); exists {
- contextPathsProp.Default = defaultContextPaths
- }
- }
- }
- }
-}
@@ -1,700 +0,0 @@
-{
- "$schema": "https://json-schema.org/draft/2020-12/schema",
- "$defs": {
- "Agent": {
- "properties": {
- "id": {
- "type": "string",
- "enum": [
- "coder",
- "task",
- "coder",
- "task"
- ],
- "title": "Agent ID",
- "description": "Unique identifier for the agent"
- },
- "name": {
- "type": "string",
- "title": "Name",
- "description": "Display name of the agent"
- },
- "description": {
- "type": "string",
- "title": "Description",
- "description": "Description of what the agent does"
- },
- "disabled": {
- "type": "boolean",
- "title": "Disabled",
- "description": "Whether this agent is disabled",
- "default": false
- },
- "model": {
- "type": "string",
- "enum": [
- "large",
- "small",
- "large",
- "small"
- ],
- "title": "Model Type",
- "description": "Type of model to use (large or small)"
- },
- "allowed_tools": {
- "items": {
- "type": "string",
- "enum": [
- "bash",
- "edit",
- "fetch",
- "glob",
- "grep",
- "ls",
- "sourcegraph",
- "view",
- "write",
- "agent"
- ]
- },
- "type": "array",
- "title": "Allowed Tools",
- "description": "List of tools this agent is allowed to use (if nil all tools are allowed)"
- },
- "allowed_mcp": {
- "additionalProperties": {
- "items": {
- "type": "string"
- },
- "type": "array"
- },
- "type": "object",
- "title": "Allowed MCP",
- "description": "Map of MCP servers this agent can use and their allowed tools"
- },
- "allowed_lsp": {
- "items": {
- "type": "string"
- },
- "type": "array",
- "title": "Allowed LSP",
- "description": "List of LSP servers this agent can use (if nil all LSPs are allowed)"
- },
- "context_paths": {
- "items": {
- "type": "string"
- },
- "type": "array",
- "title": "Context Paths",
- "description": "Custom context paths for this agent (additive to global context paths)"
- }
- },
- "type": "object",
- "required": [
- "model"
- ]
- },
- "LSPConfig": {
- "properties": {
- "enabled": {
- "type": "boolean",
- "title": "Enabled",
- "description": "Whether this LSP server is enabled",
- "default": true
- },
- "command": {
- "type": "string",
- "title": "Command",
- "description": "Command to execute for the LSP server"
- },
- "args": {
- "items": {
- "type": "string"
- },
- "type": "array",
- "title": "Arguments",
- "description": "Command line arguments for the LSP server"
- },
- "options": {
- "title": "Options",
- "description": "LSP server specific options"
- }
- },
- "type": "object",
- "required": [
- "command"
- ]
- },
- "MCP": {
- "properties": {
- "command": {
- "type": "string",
- "title": "Command",
- "description": "Command to execute for stdio MCP servers"
- },
- "env": {
- "items": {
- "type": "string"
- },
- "type": "array",
- "title": "Environment",
- "description": "Environment variables for the MCP server"
- },
- "args": {
- "items": {
- "type": "string"
- },
- "type": "array",
- "title": "Arguments",
- "description": "Command line arguments for the MCP server"
- },
- "type": {
- "type": "string",
- "enum": [
- "stdio",
- "sse",
- "stdio",
- "sse",
- "http"
- ],
- "title": "Type",
- "description": "Type of MCP connection",
- "default": "stdio"
- },
- "url": {
- "type": "string",
- "title": "URL",
- "description": "URL for SSE MCP servers"
- },
- "headers": {
- "additionalProperties": {
- "type": "string"
- },
- "type": "object",
- "title": "Headers",
- "description": "HTTP headers for SSE MCP servers"
- }
- },
- "type": "object",
- "required": [
- "type"
- ]
- },
- "Model": {
- "properties": {
- "id": {
- "type": "string",
- "title": "Model ID",
- "description": "Unique identifier for the model"
- },
- "name": {
- "type": "string",
- "title": "Model Name",
- "description": "Display name of the model"
- },
- "cost_per_1m_in": {
- "type": "number",
- "minimum": 0,
- "title": "Input Cost",
- "description": "Cost per 1 million input tokens"
- },
- "cost_per_1m_out": {
- "type": "number",
- "minimum": 0,
- "title": "Output Cost",
- "description": "Cost per 1 million output tokens"
- },
- "cost_per_1m_in_cached": {
- "type": "number",
- "minimum": 0,
- "title": "Cached Input Cost",
- "description": "Cost per 1 million cached input tokens"
- },
- "cost_per_1m_out_cached": {
- "type": "number",
- "minimum": 0,
- "title": "Cached Output Cost",
- "description": "Cost per 1 million cached output tokens"
- },
- "context_window": {
- "type": "integer",
- "minimum": 1,
- "title": "Context Window",
- "description": "Maximum context window size in tokens"
- },
- "default_max_tokens": {
- "type": "integer",
- "minimum": 1,
- "title": "Default Max Tokens",
- "description": "Default maximum tokens for responses"
- },
- "can_reason": {
- "type": "boolean",
- "title": "Can Reason",
- "description": "Whether the model supports reasoning capabilities"
- },
- "reasoning_effort": {
- "type": "string",
- "title": "Reasoning Effort",
- "description": "Default reasoning effort level for reasoning models"
- },
- "has_reasoning_effort": {
- "type": "boolean",
- "title": "Has Reasoning Effort",
- "description": "Whether the model supports reasoning effort configuration"
- },
- "supports_attachments": {
- "type": "boolean",
- "title": "Supports Images",
- "description": "Whether the model supports image attachments"
- }
- },
- "type": "object",
- "required": [
- "id",
- "name",
- "context_window",
- "default_max_tokens"
- ]
- },
- "Options": {
- "properties": {
- "context_paths": {
- "items": {
- "type": "string"
- },
- "type": "array",
- "title": "Context Paths",
- "description": "List of paths to search for context files",
- "default": [
- ".github/copilot-instructions.md",
- ".cursorrules",
- ".cursor/rules/",
- "CLAUDE.md",
- "CLAUDE.local.md",
- "GEMINI.md",
- "gemini.md",
- "crush.md",
- "crush.local.md",
- "Crush.md",
- "Crush.local.md",
- "CRUSH.md",
- "CRUSH.local.md"
- ]
- },
- "tui": {
- "$ref": "#/$defs/TUIOptions",
- "title": "TUI Options",
- "description": "Terminal UI configuration options"
- },
- "debug": {
- "type": "boolean",
- "title": "Debug",
- "description": "Enable debug logging",
- "default": false
- },
- "debug_lsp": {
- "type": "boolean",
- "title": "Debug LSP",
- "description": "Enable LSP debug logging",
- "default": false
- },
- "disable_auto_summarize": {
- "type": "boolean",
- "title": "Disable Auto Summarize",
- "description": "Disable automatic conversation summarization",
- "default": false
- },
- "data_directory": {
- "type": "string",
- "title": "Data Directory",
- "description": "Directory for storing application data",
- "default": ".crush"
- }
- },
- "type": "object"
- },
- "PreferredModel": {
- "properties": {
- "model_id": {
- "type": "string",
- "enum": [
- "claude-opus-4-20250514",
- "claude-sonnet-4-20250514",
- "claude-3-7-sonnet-20250219",
- "claude-3-5-haiku-20241022",
- "claude-3-5-sonnet-20240620",
- "claude-3-5-sonnet-20241022",
- "codex-mini-latest",
- "o4-mini",
- "o3",
- "o3-pro",
- "gpt-4.1",
- "gpt-4.1-mini",
- "gpt-4.1-nano",
- "gpt-4.5-preview",
- "o3-mini",
- "gpt-4o",
- "gpt-4o-mini",
- "gemini-2.5-pro",
- "gemini-2.5-flash",
- "codex-mini-latest",
- "o4-mini",
- "o3",
- "o3-pro",
- "gpt-4.1",
- "gpt-4.1-mini",
- "gpt-4.1-nano",
- "gpt-4.5-preview",
- "o3-mini",
- "gpt-4o",
- "gpt-4o-mini",
- "anthropic.claude-opus-4-20250514-v1:0",
- "anthropic.claude-sonnet-4-20250514-v1:0",
- "anthropic.claude-3-7-sonnet-20250219-v1:0",
- "anthropic.claude-3-5-haiku-20241022-v1:0",
- "gemini-2.5-pro",
- "gemini-2.5-flash",
- "grok-3-mini",
- "grok-3",
- "mistralai/mistral-small-3.2-24b-instruct:free",
- "mistralai/mistral-small-3.2-24b-instruct",
- "minimax/minimax-m1:extended",
- "minimax/minimax-m1",
- "google/gemini-2.5-flash-lite-preview-06-17",
- "google/gemini-2.5-flash",
- "google/gemini-2.5-pro",
- "openai/o3-pro",
- "x-ai/grok-3-mini",
- "x-ai/grok-3",
- "mistralai/magistral-small-2506",
- "mistralai/magistral-medium-2506",
- "mistralai/magistral-medium-2506:thinking",
- "google/gemini-2.5-pro-preview",
- "deepseek/deepseek-r1-0528",
- "anthropic/claude-opus-4",
- "anthropic/claude-sonnet-4",
- "mistralai/devstral-small:free",
- "mistralai/devstral-small",
- "google/gemini-2.5-flash-preview-05-20",
- "google/gemini-2.5-flash-preview-05-20:thinking",
- "openai/codex-mini",
- "mistralai/mistral-medium-3",
- "google/gemini-2.5-pro-preview-05-06",
- "arcee-ai/caller-large",
- "arcee-ai/virtuoso-large",
- "arcee-ai/virtuoso-medium-v2",
- "qwen/qwen3-30b-a3b",
- "qwen/qwen3-14b",
- "qwen/qwen3-32b",
- "qwen/qwen3-235b-a22b",
- "google/gemini-2.5-flash-preview",
- "google/gemini-2.5-flash-preview:thinking",
- "openai/o4-mini-high",
- "openai/o3",
- "openai/o4-mini",
- "openai/gpt-4.1",
- "openai/gpt-4.1-mini",
- "openai/gpt-4.1-nano",
- "x-ai/grok-3-mini-beta",
- "x-ai/grok-3-beta",
- "meta-llama/llama-4-maverick",
- "meta-llama/llama-4-scout",
- "all-hands/openhands-lm-32b-v0.1",
- "google/gemini-2.5-pro-exp-03-25",
- "deepseek/deepseek-chat-v3-0324:free",
- "deepseek/deepseek-chat-v3-0324",
- "mistralai/mistral-small-3.1-24b-instruct:free",
- "mistralai/mistral-small-3.1-24b-instruct",
- "ai21/jamba-1.6-large",
- "ai21/jamba-1.6-mini",
- "openai/gpt-4.5-preview",
- "google/gemini-2.0-flash-lite-001",
- "anthropic/claude-3.7-sonnet",
- "anthropic/claude-3.7-sonnet:beta",
- "anthropic/claude-3.7-sonnet:thinking",
- "mistralai/mistral-saba",
- "openai/o3-mini-high",
- "google/gemini-2.0-flash-001",
- "qwen/qwen-turbo",
- "qwen/qwen-plus",
- "qwen/qwen-max",
- "openai/o3-mini",
- "mistralai/mistral-small-24b-instruct-2501",
- "deepseek/deepseek-r1-distill-llama-70b",
- "deepseek/deepseek-r1",
- "mistralai/codestral-2501",
- "deepseek/deepseek-chat",
- "openai/o1",
- "x-ai/grok-2-1212",
- "meta-llama/llama-3.3-70b-instruct",
- "amazon/nova-lite-v1",
- "amazon/nova-micro-v1",
- "amazon/nova-pro-v1",
- "openai/gpt-4o-2024-11-20",
- "mistralai/mistral-large-2411",
- "mistralai/mistral-large-2407",
- "mistralai/pixtral-large-2411",
- "thedrummer/unslopnemo-12b",
- "anthropic/claude-3.5-haiku:beta",
- "anthropic/claude-3.5-haiku",
- "anthropic/claude-3.5-haiku-20241022:beta",
- "anthropic/claude-3.5-haiku-20241022",
- "anthropic/claude-3.5-sonnet:beta",
- "anthropic/claude-3.5-sonnet",
- "x-ai/grok-beta",
- "mistralai/ministral-8b",
- "mistralai/ministral-3b",
- "nvidia/llama-3.1-nemotron-70b-instruct",
- "google/gemini-flash-1.5-8b",
- "meta-llama/llama-3.2-11b-vision-instruct",
- "meta-llama/llama-3.2-3b-instruct",
- "qwen/qwen-2.5-72b-instruct",
- "mistralai/pixtral-12b",
- "cohere/command-r-plus-08-2024",
- "cohere/command-r-08-2024",
- "microsoft/phi-3.5-mini-128k-instruct",
- "nousresearch/hermes-3-llama-3.1-70b",
- "openai/gpt-4o-2024-08-06",
- "meta-llama/llama-3.1-405b-instruct",
- "meta-llama/llama-3.1-70b-instruct",
- "meta-llama/llama-3.1-8b-instruct",
- "mistralai/mistral-nemo",
- "openai/gpt-4o-mini",
- "openai/gpt-4o-mini-2024-07-18",
- "anthropic/claude-3.5-sonnet-20240620:beta",
- "anthropic/claude-3.5-sonnet-20240620",
- "mistralai/mistral-7b-instruct-v0.3",
- "mistralai/mistral-7b-instruct:free",
- "mistralai/mistral-7b-instruct",
- "microsoft/phi-3-mini-128k-instruct",
- "microsoft/phi-3-medium-128k-instruct",
- "google/gemini-flash-1.5",
- "openai/gpt-4o-2024-05-13",
- "openai/gpt-4o",
- "openai/gpt-4o:extended",
- "meta-llama/llama-3-8b-instruct",
- "meta-llama/llama-3-70b-instruct",
- "mistralai/mixtral-8x22b-instruct",
- "openai/gpt-4-turbo",
- "google/gemini-pro-1.5",
- "cohere/command-r-plus",
- "cohere/command-r-plus-04-2024",
- "cohere/command-r",
- "anthropic/claude-3-haiku:beta",
- "anthropic/claude-3-haiku",
- "anthropic/claude-3-opus:beta",
- "anthropic/claude-3-opus",
- "anthropic/claude-3-sonnet:beta",
- "anthropic/claude-3-sonnet",
- "cohere/command-r-03-2024",
- "mistralai/mistral-large",
- "openai/gpt-3.5-turbo-0613",
- "openai/gpt-4-turbo-preview",
- "mistralai/mistral-small",
- "mistralai/mistral-tiny",
- "mistralai/mixtral-8x7b-instruct",
- "openai/gpt-4-1106-preview",
- "mistralai/mistral-7b-instruct-v0.1",
- "openai/gpt-3.5-turbo-16k",
- "openai/gpt-4",
- "openai/gpt-4-0314"
- ],
- "title": "Model ID",
- "description": "ID of the preferred model"
- },
- "provider": {
- "type": "string",
- "enum": [
- "anthropic",
- "openai",
- "gemini",
- "azure",
- "bedrock",
- "vertex",
- "xai",
- "openrouter"
- ],
- "title": "Provider",
- "description": "Provider for the preferred model"
- },
- "reasoning_effort": {
- "type": "string",
- "title": "Reasoning Effort",
- "description": "Override reasoning effort for this model"
- },
- "max_tokens": {
- "type": "integer",
- "minimum": 1,
- "title": "Max Tokens",
- "description": "Override max tokens for this model"
- },
- "think": {
- "type": "boolean",
- "title": "Think",
- "description": "Enable thinking for reasoning models",
- "default": false
- }
- },
- "type": "object",
- "required": [
- "model_id",
- "provider"
- ]
- },
- "PreferredModels": {
- "properties": {
- "large": {
- "$ref": "#/$defs/PreferredModel",
- "title": "Large Model",
- "description": "Preferred model configuration for large model type"
- },
- "small": {
- "$ref": "#/$defs/PreferredModel",
- "title": "Small Model",
- "description": "Preferred model configuration for small model type"
- }
- },
- "type": "object"
- },
- "ProviderConfig": {
- "properties": {
- "id": {
- "type": "string",
- "enum": [
- "anthropic",
- "openai",
- "gemini",
- "azure",
- "bedrock",
- "vertex",
- "xai",
- "openrouter"
- ],
- "title": "Provider ID",
- "description": "Unique identifier for the provider"
- },
- "base_url": {
- "type": "string",
- "title": "Base URL",
- "description": "Base URL for the provider API (required for custom providers)"
- },
- "provider_type": {
- "type": "string",
- "title": "Provider Type",
- "description": "Type of the provider (openai"
- },
- "api_key": {
- "type": "string",
- "title": "API Key",
- "description": "API key for authenticating with the provider"
- },
- "disabled": {
- "type": "boolean",
- "title": "Disabled",
- "description": "Whether this provider is disabled",
- "default": false
- },
- "extra_headers": {
- "additionalProperties": {
- "type": "string"
- },
- "type": "object",
- "title": "Extra Headers",
- "description": "Additional HTTP headers to send with requests"
- },
- "extra_params": {
- "additionalProperties": {
- "type": "string"
- },
- "type": "object",
- "title": "Extra Parameters",
- "description": "Additional provider-specific parameters"
- },
- "default_large_model": {
- "type": "string",
- "title": "Default Large Model",
- "description": "Default model ID for large model type"
- },
- "default_small_model": {
- "type": "string",
- "title": "Default Small Model",
- "description": "Default model ID for small model type"
- },
- "models": {
- "items": {
- "$ref": "#/$defs/Model"
- },
- "type": "array",
- "title": "Models",
- "description": "List of available models for this provider"
- }
- },
- "type": "object",
- "required": [
- "provider_type"
- ]
- },
- "TUIOptions": {
- "properties": {
- "compact_mode": {
- "type": "boolean",
- "title": "Compact Mode",
- "description": "Enable compact mode for the TUI",
- "default": false
- }
- },
- "type": "object",
- "required": [
- "compact_mode"
- ]
- }
- },
- "properties": {
- "models": {
- "$ref": "#/$defs/PreferredModels",
- "title": "Models",
- "description": "Preferred model configurations for large and small model types"
- },
- "providers": {
- "additionalProperties": {
- "$ref": "#/$defs/ProviderConfig"
- },
- "type": "object",
- "title": "Providers",
- "description": "LLM provider configurations"
- },
- "agents": {
- "additionalProperties": {
- "$ref": "#/$defs/Agent"
- },
- "type": "object",
- "title": "Agents",
- "description": "Agent configurations for different tasks"
- },
- "mcp": {
- "additionalProperties": {
- "$ref": "#/$defs/MCP"
- },
- "type": "object",
- "title": "MCP",
- "description": "Model Control Protocol server configurations"
- },
- "lsp": {
- "additionalProperties": {
- "$ref": "#/$defs/LSPConfig"
- },
- "type": "object",
- "title": "LSP",
- "description": "Language Server Protocol configurations"
- },
- "options": {
- "$ref": "#/$defs/Options",
- "title": "Options",
- "description": "General application options and settings"
- }
- },
- "type": "object",
- "title": "Crush Configuration",
- "description": "Configuration schema for the Crush application"
-}
@@ -1,15 +1,7 @@
{
- "$schema": "./crush-schema.json",
"lsp": {
"go": {
"command": "gopls"
}
- },
- "mcp": {
- "context7": {
- "command": "",
- "url": "https://mcp.context7.com/mcp",
- "type": "http"
- }
}
}
@@ -1 +1 @@
-{"flagWords":[],"words":["afero","alecthomas","bubbletea","charmbracelet","charmtone","Charple","crush","diffview","Emph","filepicker","Focusable","fsext","GROQ","Guac","imageorient","Lanczos","lipgloss","lsps","lucasb","nfnt","oksvg","Preproc","rasterx","rivo","Sourcegraph","srwiley","Strikethrough","termenv","textinput","trashhalo","uniseg","Unticked","genai","jsonschema"],"version":"0.2","language":"en"}
+{"words":["afero","alecthomas","bubbletea","charmbracelet","charmtone","Charple","crush","diffview","Emph","filepicker","Focusable","fsext","GROQ","Guac","imageorient","Lanczos","lipgloss","lsps","lucasb","nfnt","oksvg","Preproc","rasterx","rivo","Sourcegraph","srwiley","Strikethrough","termenv","textinput","trashhalo","uniseg","Unticked","genai","jsonschema","preconfigured","jsons","qjebbs","LOCALAPPDATA","USERPROFILE","stretchr","cursorrules","VERTEXAI"],"flagWords":[],"language":"en","version":"0.2"}
@@ -2,6 +2,10 @@ module github.com/charmbracelet/crush
go 1.24.3
+replace github.com/charmbracelet/bubbletea/v2 => github.com/charmbracelet/bubbletea-internal/v2 v2.0.0-20250703182356-a42fb608faaf
+
+replace github.com/charmbracelet/lipgloss/v2 => github.com/charmbracelet/lipgloss-internal/v2 v2.0.0-20250703152138-ff346e83e819
+
require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.7.0
github.com/JohannesKaufmann/html-to-markdown v1.6.0
@@ -13,34 +17,47 @@ require (
github.com/bmatcuk/doublestar/v4 v4.8.1
github.com/charlievieth/fastwalk v1.0.11
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250607113720-eb5e1cf3b09e
- github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3.0.20250609143341-c76fa36f1b94
+ github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.1
github.com/charmbracelet/fang v0.1.0
github.com/charmbracelet/glamour/v2 v2.0.0-20250516160903-6f1e2c8f9ebe
github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.2.0.20250703152125-8e1c474f8a71
- github.com/charmbracelet/x/ansi v0.9.3-0.20250602153603-fb931ed90413
+ github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706
+ github.com/charmbracelet/x/ansi v0.9.3
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250627134340-c144409e381c
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a
github.com/disintegration/imageorient v0.0.0-20180920195336-8147d86e83ec
github.com/fsnotify/fsnotify v1.8.0
- github.com/go-logfmt/logfmt v0.6.0
github.com/google/uuid v1.6.0
- github.com/invopop/jsonschema v0.13.0
+ github.com/joho/godotenv v1.5.1
github.com/mark3labs/mcp-go v0.32.0
github.com/muesli/termenv v0.16.0
github.com/ncruces/go-sqlite3 v0.25.0
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646
+ github.com/nxadm/tail v1.4.11
github.com/openai/openai-go v1.8.2
github.com/pressly/goose/v3 v3.24.2
+ github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06
github.com/sahilm/fuzzy v0.1.1
github.com/spf13/cobra v1.9.1
github.com/srwiley/oksvg v0.0.0-20221011165216-be6e8873101c
github.com/srwiley/rasterx v0.0.0-20220730225603-2ab79fcdd4ef
github.com/stretchr/testify v1.10.0
+ golang.org/x/exp v0.0.0-20250305212735-054e65f0b394
+ gopkg.in/natefinch/lumberjack.v2 v2.2.1
mvdan.cc/sh/v3 v3.11.0
)
-require github.com/spf13/cast v1.7.1 // indirect
+require (
+ github.com/go-logfmt/logfmt v0.6.0 // indirect
+ github.com/spf13/cast v1.7.1 // indirect
+ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
+)
+
+require (
+ github.com/charmbracelet/ultraviolet v0.0.0-20250707134318-0fdaa64b8c5e // indirect
+ github.com/charmbracelet/x/termios v0.1.1 // indirect
+)
require (
cloud.google.com/go v0.116.0 // indirect
@@ -67,13 +84,10 @@ require (
github.com/aws/smithy-go v1.20.3 // indirect
github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect
github.com/aymerick/douceur v0.2.0 // indirect
- github.com/bahlo/generic-list-go v0.2.0 // indirect
- github.com/buger/jsonparser v1.1.1 // indirect
github.com/charmbracelet/colorprofile v0.3.1 // indirect
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250516160309-24eee56f89fa // indirect
github.com/charmbracelet/x/exp/slice v0.0.0-20250611152503-f53cdd7e01ef
- github.com/charmbracelet/x/input v0.3.5-0.20250509021451-13796e822d86 // indirect
- github.com/charmbracelet/x/term v0.2.1 // indirect
+ github.com/charmbracelet/x/term v0.2.1
github.com/charmbracelet/x/windows v0.2.1 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/disintegration/gift v1.1.2 // indirect
@@ -92,7 +106,6 @@ require (
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/kylelemons/godebug v1.1.0 // indirect
github.com/lucasb-eyer/go-colorful v1.2.0
- github.com/mailru/easyjson v0.7.7 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
@@ -114,7 +127,6 @@ require (
github.com/tidwall/match v1.1.1 // indirect
github.com/tidwall/pretty v1.2.1 // indirect
github.com/tidwall/sjson v1.2.5 // indirect
- github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
github.com/yosida95/uritemplate/v3 v3.0.2 // indirect
github.com/yuin/goldmark v1.7.8 // indirect
@@ -128,8 +140,8 @@ require (
golang.org/x/crypto v0.37.0 // indirect
golang.org/x/image v0.26.0 // indirect
golang.org/x/net v0.39.0 // indirect
- golang.org/x/sync v0.13.0 // indirect
- golang.org/x/sys v0.32.0 // indirect
+ golang.org/x/sync v0.15.0 // indirect
+ golang.org/x/sys v0.33.0 // indirect
golang.org/x/term v0.31.0 // indirect
golang.org/x/text v0.24.0 // indirect
google.golang.org/genai v1.3.0
@@ -64,30 +64,28 @@ github.com/aymanbagabas/go-udiff v0.3.1 h1:LV+qyBQ2pqe0u42ZsUEtPiCaUoqgA9gYRDs3v
github.com/aymanbagabas/go-udiff v0.3.1/go.mod h1:G0fsKmG+P6ylD0r6N/KgQD/nWzgfnl8ZBcNLgcbrw8E=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
-github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
-github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
github.com/bmatcuk/doublestar/v4 v4.8.1 h1:54Bopc5c2cAvhLRAzqOGCYHYyhcDHsFF4wWIR5wKP38=
github.com/bmatcuk/doublestar/v4 v4.8.1/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc=
-github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
-github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
github.com/charlievieth/fastwalk v1.0.11 h1:5sLT/q9+d9xMdpKExawLppqvXFZCVKf6JHnr2u/ufj8=
github.com/charlievieth/fastwalk v1.0.11/go.mod h1:yGy1zbxog41ZVMcKA/i8ojXLFsuayX5VvwhQVoj9PBI=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250607113720-eb5e1cf3b09e h1:99Ugtt633rqauFsXjZobZmtkNpeaWialfj8dl6COC6A=
github.com/charmbracelet/bubbles/v2 v2.0.0-beta.1.0.20250607113720-eb5e1cf3b09e/go.mod h1:6HamsBKWqEC/FVHuQMHgQL+knPyvHH55HwJDHl/adMw=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3.0.20250609143341-c76fa36f1b94 h1:QIi50k+uNTJmp2sMs+33D1m/EWr/7OPTJ8x92AY3eOc=
-github.com/charmbracelet/bubbletea/v2 v2.0.0-beta.3.0.20250609143341-c76fa36f1b94/go.mod h1:oOn1YZGZyJHxJfh4sFAna9vDzxJRNuErLETr/lnlB/I=
+github.com/charmbracelet/bubbletea-internal/v2 v2.0.0-20250703182356-a42fb608faaf h1:td6+JCUKCFW0liyAotsxnGicPxyar4cggCUCT/g0mfQ=
+github.com/charmbracelet/bubbletea-internal/v2 v2.0.0-20250703182356-a42fb608faaf/go.mod h1:LTnqM6sOjG1yY5MDFlqefgN+0EFGTV3PU5QoDq9EUDU=
github.com/charmbracelet/colorprofile v0.3.1 h1:k8dTHMd7fgw4bnFd7jXTLZrSU/CQrKnL3m+AxCzDz40=
github.com/charmbracelet/colorprofile v0.3.1/go.mod h1:/GkGusxNs8VB/RSOh3fu0TJmQ4ICMMPApIIVn0KszZ0=
github.com/charmbracelet/fang v0.1.0 h1:SlZS2crf3/zQh7Mr4+W+7QR1k+L08rrPX5rm5z3d7Wg=
github.com/charmbracelet/fang v0.1.0/go.mod h1:Zl/zeUQ8EtQuGyiV0ZKZlZPDowKRTzu8s/367EpN/fc=
github.com/charmbracelet/glamour/v2 v2.0.0-20250516160903-6f1e2c8f9ebe h1:i6ce4CcAlPpTj2ER69m1DBeLZ3RRcHnKExuwhKa3GfY=
github.com/charmbracelet/glamour/v2 v2.0.0-20250516160903-6f1e2c8f9ebe/go.mod h1:p3Q+aN4eQKeM5jhrmXPMgPrlKbmc59rWSnMsSA3udhk=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1.0.20250523195325-2d1af06b557c h1:177KMz8zHRlEZJsWzafbKYh6OdjgvTspoH+UjaxgIXY=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.1.0.20250523195325-2d1af06b557c/go.mod h1:EJWvaCrhOhNGVZMvcjc0yVryl4qqpMs8tz0r9WyEkdQ=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.2.0.20250703152125-8e1c474f8a71 h1:X0tsNa2UHCKNw+illiavosasVzqioRo32SRV35iwr2I=
-github.com/charmbracelet/lipgloss/v2 v2.0.0-beta.2.0.20250703152125-8e1c474f8a71/go.mod h1:EJWvaCrhOhNGVZMvcjc0yVryl4qqpMs8tz0r9WyEkdQ=
-github.com/charmbracelet/x/ansi v0.9.3-0.20250602153603-fb931ed90413 h1:L07QkDqRF274IZ2UJ/mCTL8DR95efU9BNWLYCDXEjvQ=
-github.com/charmbracelet/x/ansi v0.9.3-0.20250602153603-fb931ed90413/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
+github.com/charmbracelet/lipgloss-internal/v2 v2.0.0-20250703152138-ff346e83e819 h1:2i1WGevIAY9eV/vHWwe5Diu58DQ6h9RvKTDpo13zmdA=
+github.com/charmbracelet/lipgloss-internal/v2 v2.0.0-20250703152138-ff346e83e819/go.mod h1:kz1eyf3LJ8c5/R2RH/1Pk0kb9BjvbVcY49WCQemk4sg=
+github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706 h1:WkwO6Ks3mSIGnGuSdKl9qDSyfbYK50z2wc2gGMggegE=
+github.com/charmbracelet/log/v2 v2.0.0-20250226163916-c379e29ff706/go.mod h1:mjJGp00cxcfvD5xdCa+bso251Jt4owrQvuimJtVmEmM=
+github.com/charmbracelet/ultraviolet v0.0.0-20250707134318-0fdaa64b8c5e h1:wimsDgs/7NRciipaniyugwqHJNTCjJna68bobyPqYx8=
+github.com/charmbracelet/ultraviolet v0.0.0-20250707134318-0fdaa64b8c5e/go.mod h1:/O+B00+dYG6lqRAWIaNxSvywnDrIH6dmLYQAsH0LRTg=
+github.com/charmbracelet/x/ansi v0.9.3 h1:BXt5DHS/MKF+LjuK4huWrC6NCvHtexww7dMayh6GXd0=
+github.com/charmbracelet/x/ansi v0.9.3/go.mod h1:3RQDQ6lDnROptfpWuUVIUG64bD2g2BgntdxH0Ya5TeE=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250516160309-24eee56f89fa h1:lphz0Z3rsiOtMYiz8axkT24i9yFiueDhJbzyNUADmME=
github.com/charmbracelet/x/cellbuf v0.0.14-0.20250516160309-24eee56f89fa/go.mod h1:xBlh2Yi3DL3zy/2n15kITpg0YZardf/aa/hgUaIM6Rk=
github.com/charmbracelet/x/exp/charmtone v0.0.0-20250627134340-c144409e381c h1:2GELBLPgfSbHU53bsQhR9XIgNuVZ6w+Rz8RWV5Lq+A4=
@@ -96,10 +94,10 @@ github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a h1:FsHE
github.com/charmbracelet/x/exp/golden v0.0.0-20250207160936-21c02780d27a/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/charmbracelet/x/exp/slice v0.0.0-20250611152503-f53cdd7e01ef h1:v7qwsZ2OxzlwvpKwz8dtZXp7fIJlcDEUOyFBNE4fz4Q=
github.com/charmbracelet/x/exp/slice v0.0.0-20250611152503-f53cdd7e01ef/go.mod h1:vI5nDVMWi6veaYH+0Fmvpbe/+cv/iJfMntdh+N0+Tms=
-github.com/charmbracelet/x/input v0.3.5-0.20250509021451-13796e822d86 h1:BxAEmOBIDajkgao3EsbBxKQCYvgYPGdT62WASLvtf4Y=
-github.com/charmbracelet/x/input v0.3.5-0.20250509021451-13796e822d86/go.mod h1:62Rp/6EtTxoeJDSdtpA3tJp3y3ZRpsiekBSje+K8htA=
github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ=
github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg=
+github.com/charmbracelet/x/termios v0.1.1 h1:o3Q2bT8eqzGnGPOYheoYS8eEleT5ZVNYNy8JawjaNZY=
+github.com/charmbracelet/x/termios v0.1.1/go.mod h1:rB7fnv1TgOPOyyKRJ9o+AsTU/vK5WHJ2ivHeut/Pcwo=
github.com/charmbracelet/x/windows v0.2.1 h1:3x7vnbpQrjpuq/4L+I4gNsG5htYoCiA5oe9hLjAij5I=
github.com/charmbracelet/x/windows v0.2.1/go.mod h1:ptZp16h40gDYqs5TSawSVW+yiLB13j4kSMA0lSCHL0M=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
@@ -120,6 +118,7 @@ github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2
github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
+github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M=
github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0=
github.com/go-logfmt/logfmt v0.6.0 h1:wGYYu3uicYdqXVgoYbvnkrPVXkuLM1p1ifugDMEdRi4=
@@ -153,9 +152,8 @@ github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUq
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
-github.com/invopop/jsonschema v0.13.0 h1:KvpoAJWEjR3uD9Kbm2HWJmqsEaHt8lBUpd0qHcIi21E=
-github.com/invopop/jsonschema v0.13.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0=
-github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
+github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
+github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
@@ -167,8 +165,6 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
-github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
-github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mark3labs/mcp-go v0.32.0 h1:fgwmbfL2gbd67obg57OfV2Dnrhs1HtSdlY/i5fn7MU8=
github.com/mark3labs/mcp-go v0.32.0/go.mod h1:rXqOudj/djTORU/ThxYx8fqEVj/5pvTuuebQ2RC7uk4=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
@@ -199,6 +195,8 @@ github.com/ncruces/julianday v1.0.0 h1:fH0OKwa7NWvniGQtxdJRxAgkBMolni2BjDHaWTxqt
github.com/ncruces/julianday v1.0.0/go.mod h1:Dusn2KvZrrovOMJuOt0TNXL6tB7U2E8kvza5fFc9G7g=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646 h1:zYyBkD/k9seD2A7fsi6Oo2LfFZAehjjQMERAvZLEDnQ=
github.com/nfnt/resize v0.0.0-20180221191011-83c6a9932646/go.mod h1:jpp1/29i3P1S/RLdc7JQKbRpFeM1dOBd8T9ki5s+AY8=
+github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY=
+github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc=
github.com/openai/openai-go v1.8.2 h1:UqSkJ1vCOPUpz9Ka5tS0324EJFEuOvMc+lA/EarJWP8=
github.com/openai/openai-go v1.8.2/go.mod h1:g461MYGXEXBVdV5SaR/5tNzNbSfwTBBefwc+LlDCK0Y=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
@@ -208,6 +206,8 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pressly/goose/v3 v3.24.2 h1:c/ie0Gm8rnIVKvnDQ/scHErv46jrDv9b4I0WRcFJzYU=
github.com/pressly/goose/v3 v3.24.2/go.mod h1:kjefwFB0eR4w30Td2Gj2Mznyw94vSP+2jJYkOVNbD1k=
+github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c h1:kmzxiX+OB0knCo1V0dkEkdPelzCdAzCURCfmFArn2/A=
+github.com/qjebbs/go-jsons v0.0.0-20221222033332-a534c5fc1c4c/go.mod h1:wNJrtinHyC3YSf6giEh4FJN8+yZV7nXBjvmfjhBIcw4=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE=
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
@@ -256,8 +256,6 @@ github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
-github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc=
-github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e h1:JVG44RsyaB9T2KIHavMF/ppJZNG9ZpyihvCd0w101no=
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e/go.mod h1:RbqR21r5mrJuqunuUZ/Dhy/avygyECGrLceyNeo4LiM=
github.com/yosida95/uritemplate/v3 v3.0.2 h1:Ed3Oyj9yrmi9087+NczuL5BwkIc4wvTb5zIM+UJPGz4=
@@ -311,13 +309,14 @@ golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610=
-golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
+golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8=
+golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -326,8 +325,8 @@ golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
-golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
+golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
+golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@@ -364,6 +363,10 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
+gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
@@ -5,6 +5,7 @@ import (
"database/sql"
"errors"
"fmt"
+ "log/slog"
"maps"
"sync"
"time"
@@ -14,7 +15,7 @@ import (
"github.com/charmbracelet/crush/internal/format"
"github.com/charmbracelet/crush/internal/history"
"github.com/charmbracelet/crush/internal/llm/agent"
- "github.com/charmbracelet/crush/internal/logging"
+
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/permission"
@@ -36,9 +37,11 @@ type App struct {
watcherCancelFuncs []context.CancelFunc
cancelFuncsMutex sync.Mutex
watcherWG sync.WaitGroup
+
+ config *config.Config
}
-func New(ctx context.Context, conn *sql.DB) (*App, error) {
+func New(ctx context.Context, conn *sql.DB, cfg *config.Config) (*App, error) {
q := db.New(conn)
sessions := session.NewService(q)
messages := message.NewService(q)
@@ -48,16 +51,16 @@ func New(ctx context.Context, conn *sql.DB) (*App, error) {
Sessions: sessions,
Messages: messages,
History: files,
- Permissions: permission.NewPermissionService(),
+ Permissions: permission.NewPermissionService(cfg.WorkingDir()),
LSPClients: make(map[string]*lsp.Client),
+ config: cfg,
}
// Initialize LSP clients in the background
go app.initLSPClients(ctx)
- cfg := config.Get()
-
- coderAgentCfg := cfg.Agents[config.AgentCoder]
+ // TODO: remove the concept of agent config most likely
+ coderAgentCfg := cfg.Agents["coder"]
if coderAgentCfg.ID == "" {
return nil, fmt.Errorf("coder agent configuration is missing")
}
@@ -72,7 +75,7 @@ func New(ctx context.Context, conn *sql.DB) (*App, error) {
app.LSPClients,
)
if err != nil {
- logging.Error("Failed to create coder agent", err)
+ slog.Error("Failed to create coder agent", "err", err)
return nil, err
}
@@ -81,7 +84,7 @@ func New(ctx context.Context, conn *sql.DB) (*App, error) {
// RunNonInteractive handles the execution flow when a prompt is provided via CLI flag.
func (a *App) RunNonInteractive(ctx context.Context, prompt string, outputFormat string, quiet bool) error {
- logging.Info("Running in non-interactive mode")
+ slog.Info("Running in non-interactive mode")
// Start spinner if not in quiet mode
var spinner *format.Spinner
@@ -106,7 +109,7 @@ func (a *App) RunNonInteractive(ctx context.Context, prompt string, outputFormat
if err != nil {
return fmt.Errorf("failed to create session for non-interactive mode: %w", err)
}
- logging.Info("Created session for non-interactive run", "session_id", sess.ID)
+ slog.Info("Created session for non-interactive run", "session_id", sess.ID)
// Automatically approve all permission requests for this non-interactive session
a.Permissions.AutoApproveSession(sess.ID)
@@ -119,7 +122,7 @@ func (a *App) RunNonInteractive(ctx context.Context, prompt string, outputFormat
result := <-done
if result.Error != nil {
if errors.Is(result.Error, context.Canceled) || errors.Is(result.Error, agent.ErrRequestCancelled) {
- logging.Info("Agent processing cancelled", "session_id", sess.ID)
+ slog.Info("Agent processing cancelled", "session_id", sess.ID)
return nil
}
return fmt.Errorf("agent processing failed: %w", result.Error)
@@ -138,7 +141,7 @@ func (a *App) RunNonInteractive(ctx context.Context, prompt string, outputFormat
fmt.Println(format.FormatOutput(content, outputFormat))
- logging.Info("Non-interactive run completed", "session_id", sess.ID)
+ slog.Info("Non-interactive run completed", "session_id", sess.ID)
return nil
}
@@ -162,7 +165,7 @@ func (app *App) Shutdown() {
for name, client := range clients {
shutdownCtx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
if err := client.Shutdown(shutdownCtx); err != nil {
- logging.Error("Failed to shutdown LSP client", "name", name, "error", err)
+ slog.Error("Failed to shutdown LSP client", "name", name, "error", err)
}
cancel()
}
@@ -2,34 +2,32 @@ package app
import (
"context"
+ "log/slog"
"time"
- "github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/logging"
+ "github.com/charmbracelet/crush/internal/log"
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/lsp/watcher"
)
func (app *App) initLSPClients(ctx context.Context) {
- cfg := config.Get()
-
// Initialize LSP clients
- for name, clientConfig := range cfg.LSP {
+ for name, clientConfig := range app.config.LSP {
// Start each client initialization in its own goroutine
go app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
}
- logging.Info("LSP clients initialization started in background")
+ slog.Info("LSP clients initialization started in background")
}
// createAndStartLSPClient creates a new LSP client, initializes it, and starts its workspace watcher
func (app *App) createAndStartLSPClient(ctx context.Context, name string, command string, args ...string) {
// Create a specific context for initialization with a timeout
- logging.Info("Creating LSP client", "name", name, "command", command, "args", args)
+ slog.Info("Creating LSP client", "name", name, "command", command, "args", args)
// Create the LSP client
lspClient, err := lsp.NewClient(ctx, command, args...)
if err != nil {
- logging.Error("Failed to create LSP client for", name, err)
+		slog.Error("Failed to create LSP client", "name", name, "error", err)
return
}
@@ -38,9 +36,9 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, comman
defer cancel()
// Initialize with the initialization context
- _, err = lspClient.InitializeLSPClient(initCtx, config.WorkingDirectory())
+ _, err = lspClient.InitializeLSPClient(initCtx, app.config.WorkingDir())
if err != nil {
- logging.Error("Initialize failed", "name", name, "error", err)
+ slog.Error("Initialize failed", "name", name, "error", err)
// Clean up the client to prevent resource leaks
lspClient.Close()
return
@@ -48,15 +46,15 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, comman
// Wait for the server to be ready
if err := lspClient.WaitForServerReady(initCtx); err != nil {
- logging.Error("Server failed to become ready", "name", name, "error", err)
+ slog.Error("Server failed to become ready", "name", name, "error", err)
// We'll continue anyway, as some functionality might still work
lspClient.SetServerState(lsp.StateError)
} else {
- logging.Info("LSP server is ready", "name", name)
+ slog.Info("LSP server is ready", "name", name)
lspClient.SetServerState(lsp.StateReady)
}
- logging.Info("LSP client initialized", "name", name)
+ slog.Info("LSP client initialized", "name", name)
// Create a child context that can be canceled when the app is shutting down
watchCtx, cancelFunc := context.WithCancel(ctx)
@@ -86,22 +84,21 @@ func (app *App) createAndStartLSPClient(ctx context.Context, name string, comman
// runWorkspaceWatcher executes the workspace watcher for an LSP client
func (app *App) runWorkspaceWatcher(ctx context.Context, name string, workspaceWatcher *watcher.WorkspaceWatcher) {
defer app.watcherWG.Done()
- defer logging.RecoverPanic("LSP-"+name, func() {
+ defer log.RecoverPanic("LSP-"+name, func() {
// Try to restart the client
app.restartLSPClient(ctx, name)
})
- workspaceWatcher.WatchWorkspace(ctx, config.WorkingDirectory())
- logging.Info("Workspace watcher stopped", "client", name)
+ workspaceWatcher.WatchWorkspace(ctx, app.config.WorkingDir())
+ slog.Info("Workspace watcher stopped", "client", name)
}
// restartLSPClient attempts to restart a crashed or failed LSP client
func (app *App) restartLSPClient(ctx context.Context, name string) {
// Get the original configuration
- cfg := config.Get()
- clientConfig, exists := cfg.LSP[name]
+ clientConfig, exists := app.config.LSP[name]
if !exists {
- logging.Error("Cannot restart client, configuration not found", "client", name)
+ slog.Error("Cannot restart client, configuration not found", "client", name)
return
}
@@ -122,5 +119,5 @@ func (app *App) restartLSPClient(ctx context.Context, name string) {
// Create a new client using the shared function
app.createAndStartLSPClient(ctx, name, clientConfig.Command, clientConfig.Args...)
- logging.Info("Successfully restarted LSP client", "client", name)
+ slog.Info("Successfully restarted LSP client", "client", name)
}
@@ -1,28 +1,17 @@
package config
import (
- "encoding/json"
- "errors"
"fmt"
- "log/slog"
- "maps"
- "os"
- "path/filepath"
"slices"
"strings"
- "sync"
"github.com/charmbracelet/crush/internal/fur/provider"
- "github.com/charmbracelet/crush/internal/logging"
- "github.com/invopop/jsonschema"
)
const (
+ appName = "crush"
defaultDataDirectory = ".crush"
defaultLogLevel = "info"
- appName = "crush"
-
- MaxTokensFallbackDefault = 4096
)
var defaultContextPaths = []string{
@@ -41,82 +30,51 @@ var defaultContextPaths = []string{
"CRUSH.local.md",
}
-type AgentID string
+type SelectedModelType string
const (
- AgentCoder AgentID = "coder"
- AgentTask AgentID = "task"
+ SelectedModelTypeLarge SelectedModelType = "large"
+ SelectedModelTypeSmall SelectedModelType = "small"
)
-type ModelType string
+type SelectedModel struct {
+ // The model id as used by the provider API.
+ // Required.
+ Model string `json:"model"`
+ // The model provider, same as the key/id used in the providers config.
+ // Required.
+ Provider string `json:"provider"`
-const (
- LargeModel ModelType = "large"
- SmallModel ModelType = "small"
-)
+ // Only used by models that use the openai provider and need this set.
+ ReasoningEffort string `json:"reasoning_effort,omitempty"`
-type Model struct {
- ID string `json:"id" jsonschema:"title=Model ID,description=Unique identifier for the model, the API model"`
- Name string `json:"name" jsonschema:"title=Model Name,description=Display name of the model"`
- CostPer1MIn float64 `json:"cost_per_1m_in,omitempty" jsonschema:"title=Input Cost,description=Cost per 1 million input tokens,minimum=0"`
- CostPer1MOut float64 `json:"cost_per_1m_out,omitempty" jsonschema:"title=Output Cost,description=Cost per 1 million output tokens,minimum=0"`
- CostPer1MInCached float64 `json:"cost_per_1m_in_cached,omitempty" jsonschema:"title=Cached Input Cost,description=Cost per 1 million cached input tokens,minimum=0"`
- CostPer1MOutCached float64 `json:"cost_per_1m_out_cached,omitempty" jsonschema:"title=Cached Output Cost,description=Cost per 1 million cached output tokens,minimum=0"`
- ContextWindow int64 `json:"context_window" jsonschema:"title=Context Window,description=Maximum context window size in tokens,minimum=1"`
- DefaultMaxTokens int64 `json:"default_max_tokens" jsonschema:"title=Default Max Tokens,description=Default maximum tokens for responses,minimum=1"`
- CanReason bool `json:"can_reason,omitempty" jsonschema:"title=Can Reason,description=Whether the model supports reasoning capabilities"`
- ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"title=Reasoning Effort,description=Default reasoning effort level for reasoning models"`
- HasReasoningEffort bool `json:"has_reasoning_effort,omitempty" jsonschema:"title=Has Reasoning Effort,description=Whether the model supports reasoning effort configuration"`
- SupportsImages bool `json:"supports_attachments,omitempty" jsonschema:"title=Supports Images,description=Whether the model supports image attachments"`
-}
+ // Overrides the default model configuration.
+ MaxTokens int64 `json:"max_tokens,omitempty"`
-type VertexAIOptions struct {
- APIKey string `json:"api_key,omitempty"`
- Project string `json:"project,omitempty"`
- Location string `json:"location,omitempty"`
+ // Used by anthropic models that can reason to indicate if the model should think.
+ Think bool `json:"think,omitempty"`
}
type ProviderConfig struct {
- ID provider.InferenceProvider `json:"id,omitempty" jsonschema:"title=Provider ID,description=Unique identifier for the provider"`
- BaseURL string `json:"base_url,omitempty" jsonschema:"title=Base URL,description=Base URL for the provider API (required for custom providers)"`
- ProviderType provider.Type `json:"provider_type" jsonschema:"title=Provider Type,description=Type of the provider (openai, anthropic, etc.)"`
- APIKey string `json:"api_key,omitempty" jsonschema:"title=API Key,description=API key for authenticating with the provider"`
- Disabled bool `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this provider is disabled,default=false"`
- ExtraHeaders map[string]string `json:"extra_headers,omitempty" jsonschema:"title=Extra Headers,description=Additional HTTP headers to send with requests"`
- // used for e.x for vertex to set the project
- ExtraParams map[string]string `json:"extra_params,omitempty" jsonschema:"title=Extra Parameters,description=Additional provider-specific parameters"`
-
- DefaultLargeModel string `json:"default_large_model,omitempty" jsonschema:"title=Default Large Model,description=Default model ID for large model type"`
- DefaultSmallModel string `json:"default_small_model,omitempty" jsonschema:"title=Default Small Model,description=Default model ID for small model type"`
-
- Models []Model `json:"models,omitempty" jsonschema:"title=Models,description=List of available models for this provider"`
-}
-
-type Agent struct {
- ID AgentID `json:"id,omitempty" jsonschema:"title=Agent ID,description=Unique identifier for the agent,enum=coder,enum=task"`
- Name string `json:"name,omitempty" jsonschema:"title=Name,description=Display name of the agent"`
- Description string `json:"description,omitempty" jsonschema:"title=Description,description=Description of what the agent does"`
- // This is the id of the system prompt used by the agent
- Disabled bool `json:"disabled,omitempty" jsonschema:"title=Disabled,description=Whether this agent is disabled,default=false"`
-
- Model ModelType `json:"model" jsonschema:"title=Model Type,description=Type of model to use (large or small),enum=large,enum=small"`
+ // The provider's id.
+ ID string `json:"id,omitempty"`
+ // The provider's API endpoint.
+ BaseURL string `json:"base_url,omitempty"`
+ // The provider type, e.g. "openai", "anthropic", etc. if empty it defaults to openai.
+ Type provider.Type `json:"type,omitempty"`
+ // The provider's API key.
+ APIKey string `json:"api_key,omitempty"`
+ // Marks the provider as disabled.
+ Disable bool `json:"disable,omitempty"`
- // The available tools for the agent
- // if this is nil, all tools are available
- AllowedTools []string `json:"allowed_tools,omitempty" jsonschema:"title=Allowed Tools,description=List of tools this agent is allowed to use (if nil all tools are allowed)"`
+ // Extra headers to send with each request to the provider.
+	ExtraHeaders map[string]string `json:"extra_headers,omitempty"`
- // this tells us which MCPs are available for this agent
- // if this is empty all mcps are available
- // the string array is the list of tools from the AllowedMCP the agent has available
- // if the string array is nil, all tools from the AllowedMCP are available
- AllowedMCP map[string][]string `json:"allowed_mcp,omitempty" jsonschema:"title=Allowed MCP,description=Map of MCP servers this agent can use and their allowed tools"`
+ // Used to pass extra parameters to the provider.
+ ExtraParams map[string]string `json:"-"`
- // The list of LSPs that this agent can use
- // if this is nil, all LSPs are available
- AllowedLSP []string `json:"allowed_lsp,omitempty" jsonschema:"title=Allowed LSP,description=List of LSP servers this agent can use (if nil all LSPs are allowed)"`
-
- // Overrides the context paths for this agent
- ContextPaths []string `json:"context_paths,omitempty" jsonschema:"title=Context Paths,description=Custom context paths for this agent (additive to global context paths)"`
+ // The provider models
+ Models []provider.Model `json:"models,omitempty"`
}
type MCPType string
@@ -127,1358 +85,205 @@ const (
MCPHttp MCPType = "http"
)
-type MCP struct {
- Command string `json:"command,omitempty" jsonschema:"title=Command,description=Command to execute for stdio MCP servers"`
- Env []string `json:"env,omitempty" jsonschema:"title=Environment,description=Environment variables for the MCP server"`
- Args []string `json:"args,omitempty" jsonschema:"title=Arguments,description=Command line arguments for the MCP server"`
- Type MCPType `json:"type" jsonschema:"title=Type,description=Type of MCP connection,enum=stdio,enum=sse,enum=http,default=stdio"`
- URL string `json:"url,omitempty" jsonschema:"title=URL,description=URL for SSE MCP servers"`
+type MCPConfig struct {
+	Command string            `json:"command,omitempty"`
+ Env []string `json:"env,omitempty"`
+ Args []string `json:"args,omitempty"`
+ Type MCPType `json:"type"`
+ URL string `json:"url,omitempty"`
+
// TODO: maybe make it possible to get the value from the env
- Headers map[string]string `json:"headers,omitempty" jsonschema:"title=Headers,description=HTTP headers for SSE MCP servers"`
+ Headers map[string]string `json:"headers,omitempty"`
}
type LSPConfig struct {
- Disabled bool `json:"enabled,omitempty" jsonschema:"title=Enabled,description=Whether this LSP server is enabled,default=true"`
- Command string `json:"command" jsonschema:"title=Command,description=Command to execute for the LSP server"`
- Args []string `json:"args,omitempty" jsonschema:"title=Arguments,description=Command line arguments for the LSP server"`
- Options any `json:"options,omitempty" jsonschema:"title=Options,description=LSP server specific options"`
+	Disabled bool     `json:"disabled,omitempty"`
+ Command string `json:"command"`
+ Args []string `json:"args,omitempty"`
+ Options any `json:"options,omitempty"`
}
type TUIOptions struct {
- CompactMode bool `json:"compact_mode" jsonschema:"title=Compact Mode,description=Enable compact mode for the TUI,default=false"`
+ CompactMode bool `json:"compact_mode,omitempty"`
// Here we can add themes later or any TUI related options
}
type Options struct {
- ContextPaths []string `json:"context_paths,omitempty" jsonschema:"title=Context Paths,description=List of paths to search for context files"`
- TUI TUIOptions `json:"tui,omitempty" jsonschema:"title=TUI Options,description=Terminal UI configuration options"`
- Debug bool `json:"debug,omitempty" jsonschema:"title=Debug,description=Enable debug logging,default=false"`
- DebugLSP bool `json:"debug_lsp,omitempty" jsonschema:"title=Debug LSP,description=Enable LSP debug logging,default=false"`
- DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty" jsonschema:"title=Disable Auto Summarize,description=Disable automatic conversation summarization,default=false"`
+ ContextPaths []string `json:"context_paths,omitempty"`
+ TUI *TUIOptions `json:"tui,omitempty"`
+ Debug bool `json:"debug,omitempty"`
+ DebugLSP bool `json:"debug_lsp,omitempty"`
+ DisableAutoSummarize bool `json:"disable_auto_summarize,omitempty"`
// Relative to the cwd
- DataDirectory string `json:"data_directory,omitempty" jsonschema:"title=Data Directory,description=Directory for storing application data,default=.crush"`
+ DataDirectory string `json:"data_directory,omitempty"`
}
-type PreferredModel struct {
- ModelID string `json:"model_id" jsonschema:"title=Model ID,description=ID of the preferred model"`
- Provider provider.InferenceProvider `json:"provider" jsonschema:"title=Provider,description=Provider for the preferred model"`
- // ReasoningEffort overrides the default reasoning effort for this model
- ReasoningEffort string `json:"reasoning_effort,omitempty" jsonschema:"title=Reasoning Effort,description=Override reasoning effort for this model"`
- // MaxTokens overrides the default max tokens for this model
- MaxTokens int64 `json:"max_tokens,omitempty" jsonschema:"title=Max Tokens,description=Override max tokens for this model,minimum=1"`
+type MCPs map[string]MCPConfig
- // Think indicates if the model should think, only applicable for anthropic reasoning models
- Think bool `json:"think,omitempty" jsonschema:"title=Think,description=Enable thinking for reasoning models,default=false"`
-}
-
-type PreferredModels struct {
- Large PreferredModel `json:"large,omitempty" jsonschema:"title=Large Model,description=Preferred model configuration for large model type"`
- Small PreferredModel `json:"small,omitempty" jsonschema:"title=Small Model,description=Preferred model configuration for small model type"`
-}
-
-type Config struct {
- Models PreferredModels `json:"models,omitempty" jsonschema:"title=Models,description=Preferred model configurations for large and small model types"`
- // List of configured providers
- Providers map[provider.InferenceProvider]ProviderConfig `json:"providers,omitempty" jsonschema:"title=Providers,description=LLM provider configurations"`
-
- // List of configured agents
- Agents map[AgentID]Agent `json:"agents,omitempty" jsonschema:"title=Agents,description=Agent configurations for different tasks"`
-
- // List of configured MCPs
- MCP map[string]MCP `json:"mcp,omitempty" jsonschema:"title=MCP,description=Model Control Protocol server configurations"`
-
- // List of configured LSPs
- LSP map[string]LSPConfig `json:"lsp,omitempty" jsonschema:"title=LSP,description=Language Server Protocol configurations"`
-
- // Miscellaneous options
- Options Options `json:"options,omitempty" jsonschema:"title=Options,description=General application options and settings"`
-}
-
-var (
- instance *Config // The single instance of the Singleton
- cwd string
- once sync.Once // Ensures the initialization happens only once
-
-)
-
-func readConfigFile(path string) (*Config, error) {
- var cfg *Config
- if _, err := os.Stat(path); err != nil && !os.IsNotExist(err) {
- // some other error occurred while checking the file
- return nil, err
- } else if err == nil {
- // config file exists, read it
- file, err := os.ReadFile(path)
- if err != nil {
- return nil, err
- }
- cfg = &Config{}
- if err := json.Unmarshal(file, cfg); err != nil {
- return nil, err
- }
- } else {
- // config file does not exist, create a new one
- cfg = &Config{}
- }
- return cfg, nil
+type MCP struct {
+ Name string `json:"name"`
+ MCP MCPConfig `json:"mcp"`
}
-func loadConfig(cwd string, debug bool) (*Config, error) {
- // First read the global config file
- cfgPath := ConfigPath()
-
- cfg := defaultConfigBasedOnEnv()
- cfg.Options.Debug = debug
- defaultLevel := slog.LevelInfo
- if cfg.Options.Debug {
- defaultLevel = slog.LevelDebug
+func (m MCPs) Sorted() []MCP {
+ sorted := make([]MCP, 0, len(m))
+ for k, v := range m {
+ sorted = append(sorted, MCP{
+ Name: k,
+ MCP: v,
+ })
}
- if os.Getenv("CRUSH_DEV_DEBUG") == "true" {
- loggingFile := fmt.Sprintf("%s/%s", cfg.Options.DataDirectory, "debug.log")
-
- // if file does not exist create it
- if _, err := os.Stat(loggingFile); os.IsNotExist(err) {
- if err := os.MkdirAll(cfg.Options.DataDirectory, 0o755); err != nil {
- return cfg, fmt.Errorf("failed to create directory: %w", err)
- }
- if _, err := os.Create(loggingFile); err != nil {
- return cfg, fmt.Errorf("failed to create log file: %w", err)
- }
- }
-
- messagesPath := fmt.Sprintf("%s/%s", cfg.Options.DataDirectory, "messages")
-
- if _, err := os.Stat(messagesPath); os.IsNotExist(err) {
- if err := os.MkdirAll(messagesPath, 0o756); err != nil {
- return cfg, fmt.Errorf("failed to create directory: %w", err)
- }
- }
- logging.MessageDir = messagesPath
-
- sloggingFileWriter, err := os.OpenFile(loggingFile, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0o666)
- if err != nil {
- return cfg, fmt.Errorf("failed to open log file: %w", err)
- }
- // Configure logger
- logger := slog.New(slog.NewTextHandler(sloggingFileWriter, &slog.HandlerOptions{
- Level: defaultLevel,
- }))
- slog.SetDefault(logger)
- } else {
- // Configure logger
- logger := slog.New(slog.NewTextHandler(logging.NewWriter(), &slog.HandlerOptions{
- Level: defaultLevel,
- }))
- slog.SetDefault(logger)
- }
-
- priorityOrderedConfigFiles := []string{
- cfgPath, // Global config file
- filepath.Join(cwd, "crush.json"), // Local config file
- filepath.Join(cwd, ".crush.json"), // Local config file
- }
-
- configs := make([]*Config, 0)
- for _, path := range priorityOrderedConfigFiles {
- localConfig, err := readConfigFile(path)
- if err != nil {
- return nil, fmt.Errorf("failed to read config file %s: %w", path, err)
- }
- if localConfig != nil {
- // If the config file was read successfully, add it to the list
- configs = append(configs, localConfig)
- }
- }
-
- // merge options
- mergeOptions(cfg, configs...)
-
- mergeProviderConfigs(cfg, configs...)
- // no providers found the app is not initialized yet
- if len(cfg.Providers) == 0 {
- return cfg, nil
- }
- preferredProvider := getPreferredProvider(cfg.Providers)
- if preferredProvider != nil {
- cfg.Models = PreferredModels{
- Large: PreferredModel{
- ModelID: preferredProvider.DefaultLargeModel,
- Provider: preferredProvider.ID,
- },
- Small: PreferredModel{
- ModelID: preferredProvider.DefaultSmallModel,
- Provider: preferredProvider.ID,
- },
- }
- } else {
- // No valid providers found, set empty models
- cfg.Models = PreferredModels{}
- }
-
- mergeModels(cfg, configs...)
-
- agents := map[AgentID]Agent{
- AgentCoder: {
- ID: AgentCoder,
- Name: "Coder",
- Description: "An agent that helps with executing coding tasks.",
- Model: LargeModel,
- ContextPaths: cfg.Options.ContextPaths,
- // All tools allowed
- },
- AgentTask: {
- ID: AgentTask,
- Name: "Task",
- Description: "An agent that helps with searching for context and finding implementation details.",
- Model: LargeModel,
- ContextPaths: cfg.Options.ContextPaths,
- AllowedTools: []string{
- "glob",
- "grep",
- "ls",
- "sourcegraph",
- "view",
- },
- // NO MCPs or LSPs by default
- AllowedMCP: map[string][]string{},
- AllowedLSP: []string{},
- },
- }
- cfg.Agents = agents
- mergeAgents(cfg, configs...)
- mergeMCPs(cfg, configs...)
- mergeLSPs(cfg, configs...)
-
- // Validate the final configuration
- if err := cfg.Validate(); err != nil {
- return cfg, fmt.Errorf("configuration validation failed: %w", err)
- }
-
- return cfg, nil
-}
-
-func Init(workingDir string, debug bool) (*Config, error) {
- var err error
- once.Do(func() {
- cwd = workingDir
- instance, err = loadConfig(cwd, debug)
- if err != nil {
- logging.Error("Failed to load config", "error", err)
- }
+ slices.SortFunc(sorted, func(a, b MCP) int {
+ return strings.Compare(a.Name, b.Name)
})
-
- return instance, err
+ return sorted
}
-func Get() *Config {
- if instance == nil {
- // TODO: Handle this better
- panic("Config not initialized. Call InitConfig first.")
- }
- return instance
-}
-
-func getPreferredProvider(configuredProviders map[provider.InferenceProvider]ProviderConfig) *ProviderConfig {
- providers := Providers()
- for _, p := range providers {
- if providerConfig, ok := configuredProviders[p.ID]; ok && !providerConfig.Disabled {
- return &providerConfig
- }
- }
- // if none found return the first configured provider
- for _, providerConfig := range configuredProviders {
- if !providerConfig.Disabled {
- return &providerConfig
- }
- }
- return nil
-}
-
-func mergeProviderConfig(p provider.InferenceProvider, base, other ProviderConfig) ProviderConfig {
- if other.APIKey != "" {
- base.APIKey = other.APIKey
- }
- // Only change these options if the provider is not a known provider
- if !slices.Contains(provider.KnownProviders(), p) {
- if other.BaseURL != "" {
- base.BaseURL = other.BaseURL
- }
- if other.ProviderType != "" {
- base.ProviderType = other.ProviderType
- }
- if len(other.ExtraHeaders) > 0 {
- if base.ExtraHeaders == nil {
- base.ExtraHeaders = make(map[string]string)
- }
- maps.Copy(base.ExtraHeaders, other.ExtraHeaders)
- }
- if len(other.ExtraParams) > 0 {
- if base.ExtraParams == nil {
- base.ExtraParams = make(map[string]string)
- }
- maps.Copy(base.ExtraParams, other.ExtraParams)
- }
- }
-
- if other.Disabled {
- base.Disabled = other.Disabled
- }
-
- if other.DefaultLargeModel != "" {
- base.DefaultLargeModel = other.DefaultLargeModel
- }
- // Add new models if they don't exist
- if other.Models != nil {
- for _, model := range other.Models {
- // check if the model already exists
- exists := false
- for _, existingModel := range base.Models {
- if existingModel.ID == model.ID {
- exists = true
- break
- }
- }
- if !exists {
- base.Models = append(base.Models, model)
- }
- }
- }
-
- return base
-}
+type LSPs map[string]LSPConfig
-func validateProvider(p provider.InferenceProvider, providerConfig ProviderConfig) error {
- if !slices.Contains(provider.KnownProviders(), p) {
- if providerConfig.ProviderType != provider.TypeOpenAI {
- return errors.New("invalid provider type: " + string(providerConfig.ProviderType))
- }
- if providerConfig.BaseURL == "" {
- return errors.New("base URL must be set for custom providers")
- }
- if providerConfig.APIKey == "" {
- return errors.New("API key must be set for custom providers")
- }
- }
- return nil
+type LSP struct {
+ Name string `json:"name"`
+ LSP LSPConfig `json:"lsp"`
}
-func mergeModels(base *Config, others ...*Config) {
- for _, cfg := range others {
- if cfg == nil {
- continue
- }
- if cfg.Models.Large.ModelID != "" && cfg.Models.Large.Provider != "" {
- base.Models.Large = cfg.Models.Large
- }
-
- if cfg.Models.Small.ModelID != "" && cfg.Models.Small.Provider != "" {
- base.Models.Small = cfg.Models.Small
- }
+func (l LSPs) Sorted() []LSP {
+ sorted := make([]LSP, 0, len(l))
+ for k, v := range l {
+ sorted = append(sorted, LSP{
+ Name: k,
+ LSP: v,
+ })
}
+ slices.SortFunc(sorted, func(a, b LSP) int {
+ return strings.Compare(a.Name, b.Name)
+ })
+ return sorted
}
-func mergeOptions(base *Config, others ...*Config) {
- for _, cfg := range others {
- if cfg == nil {
- continue
- }
- baseOptions := base.Options
- other := cfg.Options
- if len(other.ContextPaths) > 0 {
- baseOptions.ContextPaths = append(baseOptions.ContextPaths, other.ContextPaths...)
- }
+type Agent struct {
+ ID string `json:"id,omitempty"`
+ Name string `json:"name,omitempty"`
+ Description string `json:"description,omitempty"`
+ // This is the id of the system prompt used by the agent
+ Disabled bool `json:"disabled,omitempty"`
- if other.TUI.CompactMode {
- baseOptions.TUI.CompactMode = other.TUI.CompactMode
- }
+ Model SelectedModelType `json:"model"`
- if other.Debug {
- baseOptions.Debug = other.Debug
- }
+ // The available tools for the agent
+ // if this is nil, all tools are available
+ AllowedTools []string `json:"allowed_tools,omitempty"`
- if other.DebugLSP {
- baseOptions.DebugLSP = other.DebugLSP
- }
+ // this tells us which MCPs are available for this agent
+ // if this is empty all mcps are available
+ // the string array is the list of tools from the AllowedMCP the agent has available
+ // if the string array is nil, all tools from the AllowedMCP are available
+ AllowedMCP map[string][]string `json:"allowed_mcp,omitempty"`
- if other.DisableAutoSummarize {
- baseOptions.DisableAutoSummarize = other.DisableAutoSummarize
- }
+ // The list of LSPs that this agent can use
+ // if this is nil, all LSPs are available
+ AllowedLSP []string `json:"allowed_lsp,omitempty"`
- if other.DataDirectory != "" {
- baseOptions.DataDirectory = other.DataDirectory
- }
- base.Options = baseOptions
- }
+ // Overrides the context paths for this agent
+ ContextPaths []string `json:"context_paths,omitempty"`
}
-func mergeAgents(base *Config, others ...*Config) {
- for _, cfg := range others {
- if cfg == nil {
- continue
- }
- for agentID, newAgent := range cfg.Agents {
- if _, ok := base.Agents[agentID]; !ok {
- newAgent.ID = agentID
- if newAgent.Model == "" {
- newAgent.Model = LargeModel
- }
- if len(newAgent.ContextPaths) > 0 {
- newAgent.ContextPaths = append(base.Options.ContextPaths, newAgent.ContextPaths...)
- } else {
- newAgent.ContextPaths = base.Options.ContextPaths
- }
- base.Agents[agentID] = newAgent
- } else {
- baseAgent := base.Agents[agentID]
+// Config holds the configuration for crush.
+type Config struct {
+ // We currently only support large/small as values here.
+ Models map[SelectedModelType]SelectedModel `json:"models,omitempty"`
- if agentID == AgentCoder || agentID == AgentTask {
- if newAgent.Model != "" {
- baseAgent.Model = newAgent.Model
- }
- if newAgent.AllowedMCP != nil {
- baseAgent.AllowedMCP = newAgent.AllowedMCP
- }
- if newAgent.AllowedLSP != nil {
- baseAgent.AllowedLSP = newAgent.AllowedLSP
- }
- // Context paths are additive for known agents too
- if len(newAgent.ContextPaths) > 0 {
- baseAgent.ContextPaths = append(baseAgent.ContextPaths, newAgent.ContextPaths...)
- }
- } else {
- if newAgent.Name != "" {
- baseAgent.Name = newAgent.Name
- }
- if newAgent.Description != "" {
- baseAgent.Description = newAgent.Description
- }
- if newAgent.Model != "" {
- baseAgent.Model = newAgent.Model
- } else if baseAgent.Model == "" {
- baseAgent.Model = LargeModel
- }
+ // The providers that are configured
+ Providers map[string]ProviderConfig `json:"providers,omitempty"`
- baseAgent.Disabled = newAgent.Disabled
+ MCP MCPs `json:"mcp,omitempty"`
- if newAgent.AllowedTools != nil {
- baseAgent.AllowedTools = newAgent.AllowedTools
- }
- if newAgent.AllowedMCP != nil {
- baseAgent.AllowedMCP = newAgent.AllowedMCP
- }
- if newAgent.AllowedLSP != nil {
- baseAgent.AllowedLSP = newAgent.AllowedLSP
- }
- if len(newAgent.ContextPaths) > 0 {
- baseAgent.ContextPaths = append(baseAgent.ContextPaths, newAgent.ContextPaths...)
- }
- }
+ LSP LSPs `json:"lsp,omitempty"`
- base.Agents[agentID] = baseAgent
- }
- }
- }
-}
+ Options *Options `json:"options,omitempty"`
-func mergeMCPs(base *Config, others ...*Config) {
- for _, cfg := range others {
- if cfg == nil {
- continue
- }
- maps.Copy(base.MCP, cfg.MCP)
- }
+ // Internal
+ workingDir string `json:"-"`
+ // TODO: most likely remove this concept when I come back to it
+ Agents map[string]Agent `json:"-"`
+ // TODO: find a better way to do this this should probably not be part of the config
+ resolver VariableResolver
}
-func mergeLSPs(base *Config, others ...*Config) {
- for _, cfg := range others {
- if cfg == nil {
- continue
- }
- maps.Copy(base.LSP, cfg.LSP)
- }
+func (c *Config) WorkingDir() string {
+ return c.workingDir
}
-func mergeProviderConfigs(base *Config, others ...*Config) {
- for _, cfg := range others {
- if cfg == nil {
- continue
- }
- for providerName, p := range cfg.Providers {
- p.ID = providerName
- if _, ok := base.Providers[providerName]; !ok {
- if slices.Contains(provider.KnownProviders(), providerName) {
- providers := Providers()
- for _, providerDef := range providers {
- if providerDef.ID == providerName {
- logging.Info("Using default provider config for", "provider", providerName)
- baseProvider := getDefaultProviderConfig(providerDef, providerDef.APIKey)
- base.Providers[providerName] = mergeProviderConfig(providerName, baseProvider, p)
- break
- }
- }
- } else {
- base.Providers[providerName] = p
- }
- } else {
- base.Providers[providerName] = mergeProviderConfig(providerName, base.Providers[providerName], p)
- }
- }
- }
-
- finalProviders := make(map[provider.InferenceProvider]ProviderConfig)
- for providerName, providerConfig := range base.Providers {
- err := validateProvider(providerName, providerConfig)
- if err != nil {
- logging.Warn("Skipping provider", "name", providerName, "error", err)
- continue // Skip invalid providers
+func (c *Config) EnabledProviders() []ProviderConfig {
+ enabled := make([]ProviderConfig, 0, len(c.Providers))
+ for _, p := range c.Providers {
+ if !p.Disable {
+ enabled = append(enabled, p)
}
- finalProviders[providerName] = providerConfig
}
- base.Providers = finalProviders
+ return enabled
}
-func providerDefaultConfig(providerID provider.InferenceProvider) ProviderConfig {
- switch providerID {
- case provider.InferenceProviderAnthropic:
- return ProviderConfig{
- ID: providerID,
- ProviderType: provider.TypeAnthropic,
- }
- case provider.InferenceProviderOpenAI:
- return ProviderConfig{
- ID: providerID,
- ProviderType: provider.TypeOpenAI,
- }
- case provider.InferenceProviderGemini:
- return ProviderConfig{
- ID: providerID,
- ProviderType: provider.TypeGemini,
- }
- case provider.InferenceProviderBedrock:
- return ProviderConfig{
- ID: providerID,
- ProviderType: provider.TypeBedrock,
- }
- case provider.InferenceProviderAzure:
- return ProviderConfig{
- ID: providerID,
- ProviderType: provider.TypeAzure,
- }
- case provider.InferenceProviderOpenRouter:
- return ProviderConfig{
- ID: providerID,
- ProviderType: provider.TypeOpenAI,
- BaseURL: "https://openrouter.ai/api/v1",
- ExtraHeaders: map[string]string{
- "HTTP-Referer": "crush.charm.land",
- "X-Title": "Crush",
- },
- }
- case provider.InferenceProviderXAI:
- return ProviderConfig{
- ID: providerID,
- ProviderType: provider.TypeXAI,
- BaseURL: "https://api.x.ai/v1",
- }
- case provider.InferenceProviderVertexAI:
- return ProviderConfig{
- ID: providerID,
- ProviderType: provider.TypeVertexAI,
- }
- default:
- return ProviderConfig{
- ID: providerID,
- ProviderType: provider.TypeOpenAI,
- }
- }
+// IsConfigured return true if at least one provider is configured
+func (c *Config) IsConfigured() bool {
+ return len(c.EnabledProviders()) > 0
}
-func getDefaultProviderConfig(p provider.Provider, apiKey string) ProviderConfig {
- providerConfig := providerDefaultConfig(p.ID)
- providerConfig.APIKey = apiKey
- providerConfig.DefaultLargeModel = p.DefaultLargeModelID
- providerConfig.DefaultSmallModel = p.DefaultSmallModelID
- baseURL := p.APIEndpoint
- if strings.HasPrefix(baseURL, "$") {
- envVar := strings.TrimPrefix(baseURL, "$")
- baseURL = os.Getenv(envVar)
- }
- providerConfig.BaseURL = baseURL
- for _, model := range p.Models {
- configModel := Model{
- ID: model.ID,
- Name: model.Name,
- CostPer1MIn: model.CostPer1MIn,
- CostPer1MOut: model.CostPer1MOut,
- CostPer1MInCached: model.CostPer1MInCached,
- CostPer1MOutCached: model.CostPer1MOutCached,
- ContextWindow: model.ContextWindow,
- DefaultMaxTokens: model.DefaultMaxTokens,
- CanReason: model.CanReason,
- SupportsImages: model.SupportsImages,
- }
- // Set reasoning effort for reasoning models
- if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
- configModel.HasReasoningEffort = model.HasReasoningEffort
- configModel.ReasoningEffort = model.DefaultReasoningEffort
- }
- providerConfig.Models = append(providerConfig.Models, configModel)
- }
- return providerConfig
-}
-
-func defaultConfigBasedOnEnv() *Config {
- cfg := &Config{
- Options: Options{
- DataDirectory: defaultDataDirectory,
- ContextPaths: defaultContextPaths,
- },
- Providers: make(map[provider.InferenceProvider]ProviderConfig),
- Agents: make(map[AgentID]Agent),
- LSP: make(map[string]LSPConfig),
- MCP: make(map[string]MCP),
- }
-
- providers := Providers()
-
- for _, p := range providers {
- if strings.HasPrefix(p.APIKey, "$") {
- envVar := strings.TrimPrefix(p.APIKey, "$")
- if apiKey := os.Getenv(envVar); apiKey != "" {
- cfg.Providers[p.ID] = getDefaultProviderConfig(p, apiKey)
- }
- }
- }
- // TODO: support local models
-
- if useVertexAI := os.Getenv("GOOGLE_GENAI_USE_VERTEXAI"); useVertexAI == "true" {
- providerConfig := providerDefaultConfig(provider.InferenceProviderVertexAI)
- providerConfig.ExtraParams = map[string]string{
- "project": os.Getenv("GOOGLE_CLOUD_PROJECT"),
- "location": os.Getenv("GOOGLE_CLOUD_LOCATION"),
- }
- // Find the VertexAI provider definition to get default models
- for _, p := range providers {
- if p.ID == provider.InferenceProviderVertexAI {
- providerConfig.DefaultLargeModel = p.DefaultLargeModelID
- providerConfig.DefaultSmallModel = p.DefaultSmallModelID
- for _, model := range p.Models {
- configModel := Model{
- ID: model.ID,
- Name: model.Name,
- CostPer1MIn: model.CostPer1MIn,
- CostPer1MOut: model.CostPer1MOut,
- CostPer1MInCached: model.CostPer1MInCached,
- CostPer1MOutCached: model.CostPer1MOutCached,
- ContextWindow: model.ContextWindow,
- DefaultMaxTokens: model.DefaultMaxTokens,
- CanReason: model.CanReason,
- SupportsImages: model.SupportsImages,
- }
- // Set reasoning effort for reasoning models
- if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
- configModel.HasReasoningEffort = model.HasReasoningEffort
- configModel.ReasoningEffort = model.DefaultReasoningEffort
- }
- providerConfig.Models = append(providerConfig.Models, configModel)
- }
- break
- }
- }
- cfg.Providers[provider.InferenceProviderVertexAI] = providerConfig
- }
-
- if hasAWSCredentials() {
- providerConfig := providerDefaultConfig(provider.InferenceProviderBedrock)
- providerConfig.ExtraParams = map[string]string{
- "region": os.Getenv("AWS_DEFAULT_REGION"),
- }
- if providerConfig.ExtraParams["region"] == "" {
- providerConfig.ExtraParams["region"] = os.Getenv("AWS_REGION")
- }
- // Find the Bedrock provider definition to get default models
- for _, p := range providers {
- if p.ID == provider.InferenceProviderBedrock {
- providerConfig.DefaultLargeModel = p.DefaultLargeModelID
- providerConfig.DefaultSmallModel = p.DefaultSmallModelID
- for _, model := range p.Models {
- configModel := Model{
- ID: model.ID,
- Name: model.Name,
- CostPer1MIn: model.CostPer1MIn,
- CostPer1MOut: model.CostPer1MOut,
- CostPer1MInCached: model.CostPer1MInCached,
- CostPer1MOutCached: model.CostPer1MOutCached,
- ContextWindow: model.ContextWindow,
- DefaultMaxTokens: model.DefaultMaxTokens,
- CanReason: model.CanReason,
- SupportsImages: model.SupportsImages,
- }
- // Set reasoning effort for reasoning models
- if model.HasReasoningEffort && model.DefaultReasoningEffort != "" {
- configModel.HasReasoningEffort = model.HasReasoningEffort
- configModel.ReasoningEffort = model.DefaultReasoningEffort
- }
- providerConfig.Models = append(providerConfig.Models, configModel)
- }
- break
+func (c *Config) GetModel(provider, model string) *provider.Model {
+ if providerConfig, ok := c.Providers[provider]; ok {
+ for _, m := range providerConfig.Models {
+ if m.ID == model {
+ return &m
}
}
- cfg.Providers[provider.InferenceProviderBedrock] = providerConfig
- }
- return cfg
-}
-
-func hasAWSCredentials() bool {
- if os.Getenv("AWS_ACCESS_KEY_ID") != "" && os.Getenv("AWS_SECRET_ACCESS_KEY") != "" {
- return true
}
-
- if os.Getenv("AWS_PROFILE") != "" || os.Getenv("AWS_DEFAULT_PROFILE") != "" {
- return true
- }
-
- if os.Getenv("AWS_REGION") != "" || os.Getenv("AWS_DEFAULT_REGION") != "" {
- return true
- }
-
- if os.Getenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") != "" ||
- os.Getenv("AWS_CONTAINER_CREDENTIALS_FULL_URI") != "" {
- return true
- }
-
- return false
-}
-
-func WorkingDirectory() string {
- return cwd
+ return nil
}
-// TODO: Handle error state
-
-func GetAgentModel(agentID AgentID) Model {
- cfg := Get()
- agent, ok := cfg.Agents[agentID]
- if !ok {
- logging.Error("Agent not found", "agent_id", agentID)
- return Model{}
- }
-
- var model PreferredModel
- switch agent.Model {
- case LargeModel:
- model = cfg.Models.Large
- case SmallModel:
- model = cfg.Models.Small
- default:
- logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
- model = cfg.Models.Large // Fallback to large model
- }
- providerConfig, ok := cfg.Providers[model.Provider]
+func (c *Config) GetProviderForModel(modelType SelectedModelType) *ProviderConfig {
+ model, ok := c.Models[modelType]
if !ok {
- logging.Error("Provider not found for agent", "agent_id", agentID, "provider", model.Provider)
- return Model{}
+ return nil
}
-
- for _, m := range providerConfig.Models {
- if m.ID == model.ModelID {
- return m
- }
+ if providerConfig, ok := c.Providers[model.Provider]; ok {
+ return &providerConfig
}
-
- logging.Error("Model not found for agent", "agent_id", agentID, "model", agent.Model)
- return Model{}
+ return nil
}
-func GetAgentProvider(agentID AgentID) ProviderConfig {
- cfg := Get()
- agent, ok := cfg.Agents[agentID]
+func (c *Config) GetModelByType(modelType SelectedModelType) *provider.Model {
+ model, ok := c.Models[modelType]
if !ok {
- logging.Error("Agent not found", "agent_id", agentID)
- return ProviderConfig{}
+ return nil
}
-
- var model PreferredModel
- switch agent.Model {
- case LargeModel:
- model = cfg.Models.Large
- case SmallModel:
- model = cfg.Models.Small
- default:
- logging.Warn("Unknown model type for agent", "agent_id", agentID, "model_type", agent.Model)
- model = cfg.Models.Large // Fallback to large model
- }
-
- providerConfig, ok := cfg.Providers[model.Provider]
- if !ok {
- logging.Error("Provider not found for agent", "agent_id", agentID, "provider", model.Provider)
- return ProviderConfig{}
- }
-
- return providerConfig
+ return c.GetModel(model.Provider, model.Model)
}
-func GetProviderModel(provider provider.InferenceProvider, modelID string) Model {
- cfg := Get()
- providerConfig, ok := cfg.Providers[provider]
+func (c *Config) LargeModel() *provider.Model {
+ model, ok := c.Models[SelectedModelTypeLarge]
if !ok {
- logging.Error("Provider not found", "provider", provider)
- return Model{}
+ return nil
}
-
- for _, model := range providerConfig.Models {
- if model.ID == modelID {
- return model
- }
- }
-
- logging.Error("Model not found for provider", "provider", provider, "model_id", modelID)
- return Model{}
+ return c.GetModel(model.Provider, model.Model)
}
-func GetModel(modelType ModelType) Model {
- cfg := Get()
- var model PreferredModel
- switch modelType {
- case LargeModel:
- model = cfg.Models.Large
- case SmallModel:
- model = cfg.Models.Small
- default:
- model = cfg.Models.Large // Fallback to large model
- }
- providerConfig, ok := cfg.Providers[model.Provider]
+func (c *Config) SmallModel() *provider.Model {
+ model, ok := c.Models[SelectedModelTypeSmall]
if !ok {
- return Model{}
+ return nil
}
-
- for _, m := range providerConfig.Models {
- if m.ID == model.ModelID {
- return m
- }
- }
- return Model{}
+ return c.GetModel(model.Provider, model.Model)
}
-func UpdatePreferredModel(modelType ModelType, model PreferredModel) error {
- cfg := Get()
- switch modelType {
- case LargeModel:
- cfg.Models.Large = model
- case SmallModel:
- cfg.Models.Small = model
- default:
- return fmt.Errorf("unknown model type: %s", modelType)
+func (c *Config) Resolve(key string) (string, error) {
+ if c.resolver == nil {
+ return "", fmt.Errorf("no variable resolver configured")
}
- return nil
-}
-
-// ValidationError represents a configuration validation error
-type ValidationError struct {
- Field string
- Message string
+ return c.resolver.ResolveValue(key)
}
-func (e ValidationError) Error() string {
- return fmt.Sprintf("validation error in %s: %s", e.Field, e.Message)
-}
-
-// ValidationErrors represents multiple validation errors
-type ValidationErrors []ValidationError
-
-func (e ValidationErrors) Error() string {
- if len(e) == 0 {
- return "no validation errors"
- }
- if len(e) == 1 {
- return e[0].Error()
- }
-
- var messages []string
- for _, err := range e {
- messages = append(messages, err.Error())
- }
- return fmt.Sprintf("multiple validation errors: %s", strings.Join(messages, "; "))
-}
-
-// HasErrors returns true if there are any validation errors
-func (e ValidationErrors) HasErrors() bool {
- return len(e) > 0
-}
-
-// Add appends a new validation error
-func (e *ValidationErrors) Add(field, message string) {
- *e = append(*e, ValidationError{Field: field, Message: message})
-}
-
-// Validate performs comprehensive validation of the configuration
-func (c *Config) Validate() error {
- var errors ValidationErrors
-
- // Validate providers
- c.validateProviders(&errors)
-
- // Validate models
- c.validateModels(&errors)
-
- // Validate agents
- c.validateAgents(&errors)
-
- // Validate options
- c.validateOptions(&errors)
-
- // Validate MCP configurations
- c.validateMCPs(&errors)
-
- // Validate LSP configurations
- c.validateLSPs(&errors)
-
- // Validate cross-references
- c.validateCrossReferences(&errors)
-
- // Validate completeness
- c.validateCompleteness(&errors)
-
- if errors.HasErrors() {
- return errors
- }
-
+// TODO: maybe handle this better
+func UpdatePreferredModel(modelType SelectedModelType, model SelectedModel) error {
+ cfg := Get()
+ cfg.Models[modelType] = model
return nil
}
-
-// validateProviders validates all provider configurations
-func (c *Config) validateProviders(errors *ValidationErrors) {
- if c.Providers == nil {
- c.Providers = make(map[provider.InferenceProvider]ProviderConfig)
- }
-
- knownProviders := provider.KnownProviders()
- validTypes := []provider.Type{
- provider.TypeOpenAI,
- provider.TypeAnthropic,
- provider.TypeGemini,
- provider.TypeAzure,
- provider.TypeBedrock,
- provider.TypeVertexAI,
- provider.TypeXAI,
- }
-
- for providerID, providerConfig := range c.Providers {
- fieldPrefix := fmt.Sprintf("providers.%s", providerID)
-
- // Validate API key for non-disabled providers
- if !providerConfig.Disabled && providerConfig.APIKey == "" {
- // Special case for AWS Bedrock and VertexAI which may use other auth methods
- if providerID != provider.InferenceProviderBedrock && providerID != provider.InferenceProviderVertexAI {
- errors.Add(fieldPrefix+".api_key", "API key is required for non-disabled providers")
- }
- }
-
- // Validate provider type
- validType := slices.Contains(validTypes, providerConfig.ProviderType)
- if !validType {
- errors.Add(fieldPrefix+".provider_type", fmt.Sprintf("invalid provider type: %s", providerConfig.ProviderType))
- }
-
- // Validate custom providers
- isKnownProvider := slices.Contains(knownProviders, providerID)
-
- if !isKnownProvider {
- // Custom provider validation
- if providerConfig.BaseURL == "" {
- errors.Add(fieldPrefix+".base_url", "BaseURL is required for custom providers")
- }
- if providerConfig.ProviderType != provider.TypeOpenAI {
- errors.Add(fieldPrefix+".provider_type", "custom providers currently only support OpenAI type")
- }
- }
-
- // Validate models
- modelIDs := make(map[string]bool)
- for i, model := range providerConfig.Models {
- modelFieldPrefix := fmt.Sprintf("%s.models[%d]", fieldPrefix, i)
-
- // Check for duplicate model IDs
- if modelIDs[model.ID] {
- errors.Add(modelFieldPrefix+".id", fmt.Sprintf("duplicate model ID: %s", model.ID))
- }
- modelIDs[model.ID] = true
-
- // Validate required model fields
- if model.ID == "" {
- errors.Add(modelFieldPrefix+".id", "model ID is required")
- }
- if model.Name == "" {
- errors.Add(modelFieldPrefix+".name", "model name is required")
- }
- if model.ContextWindow <= 0 {
- errors.Add(modelFieldPrefix+".context_window", "context window must be positive")
- }
- if model.DefaultMaxTokens <= 0 {
- errors.Add(modelFieldPrefix+".default_max_tokens", "default max tokens must be positive")
- }
- if model.DefaultMaxTokens > model.ContextWindow {
- errors.Add(modelFieldPrefix+".default_max_tokens", "default max tokens cannot exceed context window")
- }
-
- // Validate cost fields
- if model.CostPer1MIn < 0 {
- errors.Add(modelFieldPrefix+".cost_per_1m_in", "cost per 1M input tokens cannot be negative")
- }
- if model.CostPer1MOut < 0 {
- errors.Add(modelFieldPrefix+".cost_per_1m_out", "cost per 1M output tokens cannot be negative")
- }
- if model.CostPer1MInCached < 0 {
- errors.Add(modelFieldPrefix+".cost_per_1m_in_cached", "cached cost per 1M input tokens cannot be negative")
- }
- if model.CostPer1MOutCached < 0 {
- errors.Add(modelFieldPrefix+".cost_per_1m_out_cached", "cached cost per 1M output tokens cannot be negative")
- }
- }
-
- // Validate default model references
- if providerConfig.DefaultLargeModel != "" {
- if !modelIDs[providerConfig.DefaultLargeModel] {
- errors.Add(fieldPrefix+".default_large_model", fmt.Sprintf("default large model '%s' not found in provider models", providerConfig.DefaultLargeModel))
- }
- }
- if providerConfig.DefaultSmallModel != "" {
- if !modelIDs[providerConfig.DefaultSmallModel] {
- errors.Add(fieldPrefix+".default_small_model", fmt.Sprintf("default small model '%s' not found in provider models", providerConfig.DefaultSmallModel))
- }
- }
-
- // Validate provider-specific requirements
- c.validateProviderSpecific(providerID, providerConfig, errors)
- }
-}
-
-// validateProviderSpecific validates provider-specific requirements
-func (c *Config) validateProviderSpecific(providerID provider.InferenceProvider, providerConfig ProviderConfig, errors *ValidationErrors) {
- fieldPrefix := fmt.Sprintf("providers.%s", providerID)
-
- switch providerID {
- case provider.InferenceProviderVertexAI:
- if !providerConfig.Disabled {
- if providerConfig.ExtraParams == nil {
- errors.Add(fieldPrefix+".extra_params", "VertexAI requires extra_params configuration")
- } else {
- if providerConfig.ExtraParams["project"] == "" {
- errors.Add(fieldPrefix+".extra_params.project", "VertexAI requires project parameter")
- }
- if providerConfig.ExtraParams["location"] == "" {
- errors.Add(fieldPrefix+".extra_params.location", "VertexAI requires location parameter")
- }
- }
- }
- case provider.InferenceProviderBedrock:
- if !providerConfig.Disabled {
- if providerConfig.ExtraParams == nil || providerConfig.ExtraParams["region"] == "" {
- errors.Add(fieldPrefix+".extra_params.region", "Bedrock requires region parameter")
- }
- // Check for AWS credentials in environment
- if !hasAWSCredentials() {
- errors.Add(fieldPrefix, "Bedrock requires AWS credentials in environment")
- }
- }
- }
-}
-
-// validateModels validates preferred model configurations
-func (c *Config) validateModels(errors *ValidationErrors) {
- // Validate large model
- if c.Models.Large.ModelID != "" || c.Models.Large.Provider != "" {
- if c.Models.Large.ModelID == "" {
- errors.Add("models.large.model_id", "large model ID is required when provider is set")
- }
- if c.Models.Large.Provider == "" {
- errors.Add("models.large.provider", "large model provider is required when model ID is set")
- }
-
- // Check if provider exists and is not disabled
- if providerConfig, exists := c.Providers[c.Models.Large.Provider]; exists {
- if providerConfig.Disabled {
- errors.Add("models.large.provider", "large model provider is disabled")
- }
-
- // Check if model exists in provider
- modelExists := false
- for _, model := range providerConfig.Models {
- if model.ID == c.Models.Large.ModelID {
- modelExists = true
- break
- }
- }
- if !modelExists {
- errors.Add("models.large.model_id", fmt.Sprintf("large model '%s' not found in provider '%s'", c.Models.Large.ModelID, c.Models.Large.Provider))
- }
- } else {
- errors.Add("models.large.provider", fmt.Sprintf("large model provider '%s' not found", c.Models.Large.Provider))
- }
- }
-
- // Validate small model
- if c.Models.Small.ModelID != "" || c.Models.Small.Provider != "" {
- if c.Models.Small.ModelID == "" {
- errors.Add("models.small.model_id", "small model ID is required when provider is set")
- }
- if c.Models.Small.Provider == "" {
- errors.Add("models.small.provider", "small model provider is required when model ID is set")
- }
-
- // Check if provider exists and is not disabled
- if providerConfig, exists := c.Providers[c.Models.Small.Provider]; exists {
- if providerConfig.Disabled {
- errors.Add("models.small.provider", "small model provider is disabled")
- }
-
- // Check if model exists in provider
- modelExists := false
- for _, model := range providerConfig.Models {
- if model.ID == c.Models.Small.ModelID {
- modelExists = true
- break
- }
- }
- if !modelExists {
- errors.Add("models.small.model_id", fmt.Sprintf("small model '%s' not found in provider '%s'", c.Models.Small.ModelID, c.Models.Small.Provider))
- }
- } else {
- errors.Add("models.small.provider", fmt.Sprintf("small model provider '%s' not found", c.Models.Small.Provider))
- }
- }
-}
-
-// validateAgents validates agent configurations
-func (c *Config) validateAgents(errors *ValidationErrors) {
- if c.Agents == nil {
- c.Agents = make(map[AgentID]Agent)
- }
-
- validTools := []string{
- "bash", "edit", "fetch", "glob", "grep", "ls", "sourcegraph", "view", "write", "agent",
- }
-
- for agentID, agent := range c.Agents {
- fieldPrefix := fmt.Sprintf("agents.%s", agentID)
-
- // Validate agent ID consistency
- if agent.ID != agentID {
- errors.Add(fieldPrefix+".id", fmt.Sprintf("agent ID mismatch: expected '%s', got '%s'", agentID, agent.ID))
- }
-
- // Validate required fields
- if agent.ID == "" {
- errors.Add(fieldPrefix+".id", "agent ID is required")
- }
- if agent.Name == "" {
- errors.Add(fieldPrefix+".name", "agent name is required")
- }
-
- // Validate model type
- if agent.Model != LargeModel && agent.Model != SmallModel {
- errors.Add(fieldPrefix+".model", fmt.Sprintf("invalid model type: %s (must be 'large' or 'small')", agent.Model))
- }
-
- // Validate allowed tools
- if agent.AllowedTools != nil {
- for i, tool := range agent.AllowedTools {
- validTool := slices.Contains(validTools, tool)
- if !validTool {
- errors.Add(fmt.Sprintf("%s.allowed_tools[%d]", fieldPrefix, i), fmt.Sprintf("unknown tool: %s", tool))
- }
- }
- }
-
- // Validate MCP references
- if agent.AllowedMCP != nil {
- for mcpName := range agent.AllowedMCP {
- if _, exists := c.MCP[mcpName]; !exists {
- errors.Add(fieldPrefix+".allowed_mcp", fmt.Sprintf("referenced MCP '%s' not found", mcpName))
- }
- }
- }
-
- // Validate LSP references
- if agent.AllowedLSP != nil {
- for _, lspName := range agent.AllowedLSP {
- if _, exists := c.LSP[lspName]; !exists {
- errors.Add(fieldPrefix+".allowed_lsp", fmt.Sprintf("referenced LSP '%s' not found", lspName))
- }
- }
- }
-
- // Validate context paths (basic path validation)
- for i, contextPath := range agent.ContextPaths {
- if contextPath == "" {
- errors.Add(fmt.Sprintf("%s.context_paths[%d]", fieldPrefix, i), "context path cannot be empty")
- }
- // Check for invalid characters in path
- if strings.Contains(contextPath, "\x00") {
- errors.Add(fmt.Sprintf("%s.context_paths[%d]", fieldPrefix, i), "context path contains invalid characters")
- }
- }
-
- // Validate known agents maintain their core properties
- if agentID == AgentCoder {
- if agent.Name != "Coder" {
- errors.Add(fieldPrefix+".name", "coder agent name cannot be changed")
- }
- if agent.Description != "An agent that helps with executing coding tasks." {
- errors.Add(fieldPrefix+".description", "coder agent description cannot be changed")
- }
- } else if agentID == AgentTask {
- if agent.Name != "Task" {
- errors.Add(fieldPrefix+".name", "task agent name cannot be changed")
- }
- if agent.Description != "An agent that helps with searching for context and finding implementation details." {
- errors.Add(fieldPrefix+".description", "task agent description cannot be changed")
- }
- expectedTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
- if agent.AllowedTools != nil && !slices.Equal(agent.AllowedTools, expectedTools) {
- errors.Add(fieldPrefix+".allowed_tools", "task agent allowed tools cannot be changed")
- }
- }
- }
-}
-
-// validateOptions validates configuration options
-func (c *Config) validateOptions(errors *ValidationErrors) {
- // Validate data directory
- if c.Options.DataDirectory == "" {
- errors.Add("options.data_directory", "data directory is required")
- }
-
- // Validate context paths
- for i, contextPath := range c.Options.ContextPaths {
- if contextPath == "" {
- errors.Add(fmt.Sprintf("options.context_paths[%d]", i), "context path cannot be empty")
- }
- if strings.Contains(contextPath, "\x00") {
- errors.Add(fmt.Sprintf("options.context_paths[%d]", i), "context path contains invalid characters")
- }
- }
-}
-
-// validateMCPs validates MCP configurations
-func (c *Config) validateMCPs(errors *ValidationErrors) {
- if c.MCP == nil {
- c.MCP = make(map[string]MCP)
- }
-
- for mcpName, mcpConfig := range c.MCP {
- fieldPrefix := fmt.Sprintf("mcp.%s", mcpName)
-
- // Validate MCP type
- if mcpConfig.Type != MCPStdio && mcpConfig.Type != MCPSse && mcpConfig.Type != MCPHttp {
- errors.Add(fieldPrefix+".type", fmt.Sprintf("invalid MCP type: %s (must be 'stdio' or 'sse' or 'http')", mcpConfig.Type))
- }
-
- // Validate based on type
- if mcpConfig.Type == MCPStdio {
- if mcpConfig.Command == "" {
- errors.Add(fieldPrefix+".command", "command is required for stdio MCP")
- }
- } else if mcpConfig.Type == MCPSse {
- if mcpConfig.URL == "" {
- errors.Add(fieldPrefix+".url", "URL is required for SSE MCP")
- }
- }
- }
-}
-
-// validateLSPs validates LSP configurations
-func (c *Config) validateLSPs(errors *ValidationErrors) {
- if c.LSP == nil {
- c.LSP = make(map[string]LSPConfig)
- }
-
- for lspName, lspConfig := range c.LSP {
- fieldPrefix := fmt.Sprintf("lsp.%s", lspName)
-
- if lspConfig.Command == "" {
- errors.Add(fieldPrefix+".command", "command is required for LSP")
- }
- }
-}
-
-// validateCrossReferences validates cross-references between different config sections
-func (c *Config) validateCrossReferences(errors *ValidationErrors) {
- // Validate that agents can use their assigned model types
- for agentID, agent := range c.Agents {
- fieldPrefix := fmt.Sprintf("agents.%s", agentID)
-
- var preferredModel PreferredModel
- switch agent.Model {
- case LargeModel:
- preferredModel = c.Models.Large
- case SmallModel:
- preferredModel = c.Models.Small
- }
-
- if preferredModel.Provider != "" {
- if providerConfig, exists := c.Providers[preferredModel.Provider]; exists {
- if providerConfig.Disabled {
- errors.Add(fieldPrefix+".model", fmt.Sprintf("agent cannot use model type '%s' because provider '%s' is disabled", agent.Model, preferredModel.Provider))
- }
- }
- }
- }
-}
-
-// validateCompleteness validates that the configuration is complete and usable
-func (c *Config) validateCompleteness(errors *ValidationErrors) {
- // Check for at least one valid, non-disabled provider
- hasValidProvider := false
- for _, providerConfig := range c.Providers {
- if !providerConfig.Disabled {
- hasValidProvider = true
- break
- }
- }
- if !hasValidProvider {
- errors.Add("providers", "at least one non-disabled provider is required")
- }
-
- // Check that default agents exist
- if _, exists := c.Agents[AgentCoder]; !exists {
- errors.Add("agents", "coder agent is required")
- }
- if _, exists := c.Agents[AgentTask]; !exists {
- errors.Add("agents", "task agent is required")
- }
-
- // Check that preferred models are set if providers exist
- if hasValidProvider {
- if c.Models.Large.ModelID == "" || c.Models.Large.Provider == "" {
- errors.Add("models.large", "large preferred model must be configured when providers are available")
- }
- if c.Models.Small.ModelID == "" || c.Models.Small.Provider == "" {
- errors.Add("models.small", "small preferred model must be configured when providers are available")
- }
- }
-}
-
-// JSONSchemaExtend adds custom schema properties for AgentID
-func (AgentID) JSONSchemaExtend(schema *jsonschema.Schema) {
- schema.Enum = []any{
- string(AgentCoder),
- string(AgentTask),
- }
-}
-
-// JSONSchemaExtend adds custom schema properties for ModelType
-func (ModelType) JSONSchemaExtend(schema *jsonschema.Schema) {
- schema.Enum = []any{
- string(LargeModel),
- string(SmallModel),
- }
-}
-
-// JSONSchemaExtend adds custom schema properties for MCPType
-func (MCPType) JSONSchemaExtend(schema *jsonschema.Schema) {
- schema.Enum = []any{
- string(MCPStdio),
- string(MCPSse),
- }
-}
@@ -1,2075 +0,0 @@
-package config
-
-import (
- "encoding/json"
- "os"
- "path/filepath"
- "sync"
- "testing"
-
- "github.com/charmbracelet/crush/internal/fur/provider"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-func reset() {
- // Clear all environment variables that could affect config
- envVarsToUnset := []string{
- // API Keys
- "ANTHROPIC_API_KEY",
- "OPENAI_API_KEY",
- "GEMINI_API_KEY",
- "XAI_API_KEY",
- "OPENROUTER_API_KEY",
-
- // Google Cloud / VertexAI
- "GOOGLE_GENAI_USE_VERTEXAI",
- "GOOGLE_CLOUD_PROJECT",
- "GOOGLE_CLOUD_LOCATION",
-
- // AWS Credentials
- "AWS_ACCESS_KEY_ID",
- "AWS_SECRET_ACCESS_KEY",
- "AWS_REGION",
- "AWS_DEFAULT_REGION",
- "AWS_PROFILE",
- "AWS_DEFAULT_PROFILE",
- "AWS_CONTAINER_CREDENTIALS_RELATIVE_URI",
- "AWS_CONTAINER_CREDENTIALS_FULL_URI",
-
- // Other
- "CRUSH_DEV_DEBUG",
- }
-
- for _, envVar := range envVarsToUnset {
- os.Unsetenv(envVar)
- }
-
- // Reset singleton
- once = sync.Once{}
- instance = nil
- cwd = ""
- testConfigDir = ""
-
- // Enable mock providers for all tests to avoid API calls
- UseMockProviders = true
- ResetProviders()
-}
-
-// Core Configuration Loading Tests
-
-func TestInit_ValidWorkingDirectory(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.NotNil(t, cfg)
- assert.Equal(t, cwdDir, WorkingDirectory())
- assert.Equal(t, defaultDataDirectory, cfg.Options.DataDirectory)
- assert.Equal(t, defaultContextPaths, cfg.Options.ContextPaths)
-}
-
-func TestInit_WithDebugFlag(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- cfg, err := Init(cwdDir, true)
-
- require.NoError(t, err)
- assert.True(t, cfg.Options.Debug)
-}
-
-func TestInit_SingletonBehavior(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- cfg1, err1 := Init(cwdDir, false)
- cfg2, err2 := Init(cwdDir, false)
-
- require.NoError(t, err1)
- require.NoError(t, err2)
- assert.Same(t, cfg1, cfg2)
-}
-
-func TestGet_BeforeInitialization(t *testing.T) {
- reset()
-
- assert.Panics(t, func() {
- Get()
- })
-}
-
-func TestGet_AfterInitialization(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- cfg1, err := Init(cwdDir, false)
- require.NoError(t, err)
-
- cfg2 := Get()
- assert.Same(t, cfg1, cfg2)
-}
-
-func TestLoadConfig_NoConfigFiles(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Len(t, cfg.Providers, 0)
- assert.Equal(t, defaultContextPaths, cfg.Options.ContextPaths)
-}
-
-func TestLoadConfig_OnlyGlobalConfig(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "test-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-3.5-turbo",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- CostPer1MIn: 30.0,
- CostPer1MOut: 60.0,
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "gpt-3.5-turbo",
- Name: "GPT-3.5 Turbo",
- CostPer1MIn: 1.0,
- CostPer1MOut: 2.0,
- ContextWindow: 4096,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- Options: Options{
- ContextPaths: []string{"custom-context.md"},
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
-
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Len(t, cfg.Providers, 1)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderOpenAI)
- assert.Contains(t, cfg.Options.ContextPaths, "custom-context.md")
-}
-
-func TestLoadConfig_OnlyLocalConfig(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- localConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderAnthropic: {
- ID: provider.InferenceProviderAnthropic,
- APIKey: "local-key",
- ProviderType: provider.TypeAnthropic,
- DefaultLargeModel: "claude-3-opus",
- DefaultSmallModel: "claude-3-haiku",
- Models: []Model{
- {
- ID: "claude-3-opus",
- Name: "Claude 3 Opus",
- CostPer1MIn: 15.0,
- CostPer1MOut: 75.0,
- ContextWindow: 200000,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "claude-3-haiku",
- Name: "Claude 3 Haiku",
- CostPer1MIn: 0.25,
- CostPer1MOut: 1.25,
- ContextWindow: 200000,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- Options: Options{
- TUI: TUIOptions{CompactMode: true},
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err := json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Len(t, cfg.Providers, 1)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderAnthropic)
- assert.True(t, cfg.Options.TUI.CompactMode)
-}
-
-func TestLoadConfig_BothGlobalAndLocal(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "global-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-3.5-turbo",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- CostPer1MIn: 30.0,
- CostPer1MOut: 60.0,
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "gpt-3.5-turbo",
- Name: "GPT-3.5 Turbo",
- CostPer1MIn: 1.0,
- CostPer1MOut: 2.0,
- ContextWindow: 4096,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- Options: Options{
- ContextPaths: []string{"global-context.md"},
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- localConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- APIKey: "local-key", // Override global
- },
- provider.InferenceProviderAnthropic: {
- ID: provider.InferenceProviderAnthropic,
- APIKey: "anthropic-key",
- ProviderType: provider.TypeAnthropic,
- DefaultLargeModel: "claude-3-opus",
- DefaultSmallModel: "claude-3-haiku",
- Models: []Model{
- {
- ID: "claude-3-opus",
- Name: "Claude 3 Opus",
- CostPer1MIn: 15.0,
- CostPer1MOut: 75.0,
- ContextWindow: 200000,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "claude-3-haiku",
- Name: "Claude 3 Haiku",
- CostPer1MIn: 0.25,
- CostPer1MOut: 1.25,
- ContextWindow: 200000,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- Options: Options{
- ContextPaths: []string{"local-context.md"},
- TUI: TUIOptions{CompactMode: true},
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Len(t, cfg.Providers, 2)
-
- openaiProvider := cfg.Providers[provider.InferenceProviderOpenAI]
- assert.Equal(t, "local-key", openaiProvider.APIKey)
-
- assert.Contains(t, cfg.Providers, provider.InferenceProviderAnthropic)
-
- assert.Contains(t, cfg.Options.ContextPaths, "global-context.md")
- assert.Contains(t, cfg.Options.ContextPaths, "local-context.md")
- assert.True(t, cfg.Options.TUI.CompactMode)
-}
-
-func TestLoadConfig_MalformedGlobalJSON(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- require.NoError(t, os.WriteFile(configPath, []byte(`{invalid json`), 0o644))
-
- _, err := Init(cwdDir, false)
- assert.Error(t, err)
-}
-
-func TestLoadConfig_MalformedLocalJSON(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- require.NoError(t, os.WriteFile(localConfigPath, []byte(`{invalid json`), 0o644))
-
- _, err := Init(cwdDir, false)
- assert.Error(t, err)
-}
-
-func TestConfigWithoutEnv(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- cfg, _ := Init(cwdDir, false)
- assert.Len(t, cfg.Providers, 0)
-}
-
-func TestConfigWithEnv(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-anthropic-key")
- os.Setenv("OPENAI_API_KEY", "test-openai-key")
- os.Setenv("GEMINI_API_KEY", "test-gemini-key")
- os.Setenv("XAI_API_KEY", "test-xai-key")
- os.Setenv("OPENROUTER_API_KEY", "test-openrouter-key")
-
- cfg, _ := Init(cwdDir, false)
- assert.Len(t, cfg.Providers, 5)
-}
-
-// Environment Variable Tests
-
-func TestEnvVars_NoEnvironmentVariables(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Len(t, cfg.Providers, 0)
-}
-
-func TestEnvVars_AllSupportedAPIKeys(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-anthropic-key")
- os.Setenv("OPENAI_API_KEY", "test-openai-key")
- os.Setenv("GEMINI_API_KEY", "test-gemini-key")
- os.Setenv("XAI_API_KEY", "test-xai-key")
- os.Setenv("OPENROUTER_API_KEY", "test-openrouter-key")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Len(t, cfg.Providers, 5)
-
- anthropicProvider := cfg.Providers[provider.InferenceProviderAnthropic]
- assert.Equal(t, "test-anthropic-key", anthropicProvider.APIKey)
- assert.Equal(t, provider.TypeAnthropic, anthropicProvider.ProviderType)
-
- openaiProvider := cfg.Providers[provider.InferenceProviderOpenAI]
- assert.Equal(t, "test-openai-key", openaiProvider.APIKey)
- assert.Equal(t, provider.TypeOpenAI, openaiProvider.ProviderType)
-
- geminiProvider := cfg.Providers[provider.InferenceProviderGemini]
- assert.Equal(t, "test-gemini-key", geminiProvider.APIKey)
- assert.Equal(t, provider.TypeGemini, geminiProvider.ProviderType)
-
- xaiProvider := cfg.Providers[provider.InferenceProviderXAI]
- assert.Equal(t, "test-xai-key", xaiProvider.APIKey)
- assert.Equal(t, provider.TypeXAI, xaiProvider.ProviderType)
-
- openrouterProvider := cfg.Providers[provider.InferenceProviderOpenRouter]
- assert.Equal(t, "test-openrouter-key", openrouterProvider.APIKey)
- assert.Equal(t, provider.TypeOpenAI, openrouterProvider.ProviderType)
- assert.Equal(t, "https://openrouter.ai/api/v1", openrouterProvider.BaseURL)
-}
-
-func TestEnvVars_PartialEnvironmentVariables(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-anthropic-key")
- os.Setenv("OPENAI_API_KEY", "test-openai-key")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Len(t, cfg.Providers, 2)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderAnthropic)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderOpenAI)
- assert.NotContains(t, cfg.Providers, provider.InferenceProviderGemini)
-}
-
-func TestEnvVars_VertexAIConfiguration(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("GOOGLE_GENAI_USE_VERTEXAI", "true")
- os.Setenv("GOOGLE_CLOUD_PROJECT", "test-project")
- os.Setenv("GOOGLE_CLOUD_LOCATION", "us-central1")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderVertexAI)
-
- vertexProvider := cfg.Providers[provider.InferenceProviderVertexAI]
- assert.Equal(t, provider.TypeVertexAI, vertexProvider.ProviderType)
- assert.Equal(t, "test-project", vertexProvider.ExtraParams["project"])
- assert.Equal(t, "us-central1", vertexProvider.ExtraParams["location"])
-}
-
-func TestEnvVars_VertexAIWithoutUseFlag(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("GOOGLE_CLOUD_PROJECT", "test-project")
- os.Setenv("GOOGLE_CLOUD_LOCATION", "us-central1")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.NotContains(t, cfg.Providers, provider.InferenceProviderVertexAI)
-}
-
-func TestEnvVars_AWSBedrockWithAccessKeys(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("AWS_ACCESS_KEY_ID", "test-access-key")
- os.Setenv("AWS_SECRET_ACCESS_KEY", "test-secret-key")
- os.Setenv("AWS_DEFAULT_REGION", "us-east-1")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderBedrock)
-
- bedrockProvider := cfg.Providers[provider.InferenceProviderBedrock]
- assert.Equal(t, provider.TypeBedrock, bedrockProvider.ProviderType)
- assert.Equal(t, "us-east-1", bedrockProvider.ExtraParams["region"])
-}
-
-func TestEnvVars_AWSBedrockWithProfile(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("AWS_PROFILE", "test-profile")
- os.Setenv("AWS_REGION", "eu-west-1")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderBedrock)
-
- bedrockProvider := cfg.Providers[provider.InferenceProviderBedrock]
- assert.Equal(t, "eu-west-1", bedrockProvider.ExtraParams["region"])
-}
-
-func TestEnvVars_AWSBedrockWithContainerCredentials(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI", "/v2/credentials/test")
- os.Setenv("AWS_DEFAULT_REGION", "ap-southeast-1")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderBedrock)
-}
-
-func TestEnvVars_AWSBedrockRegionPriority(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("AWS_ACCESS_KEY_ID", "test-key")
- os.Setenv("AWS_SECRET_ACCESS_KEY", "test-secret")
- os.Setenv("AWS_DEFAULT_REGION", "us-west-2")
- os.Setenv("AWS_REGION", "us-east-1")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- bedrockProvider := cfg.Providers[provider.InferenceProviderBedrock]
- assert.Equal(t, "us-west-2", bedrockProvider.ExtraParams["region"])
-}
-
-func TestEnvVars_AWSBedrockFallbackRegion(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("AWS_ACCESS_KEY_ID", "test-key")
- os.Setenv("AWS_SECRET_ACCESS_KEY", "test-secret")
- os.Setenv("AWS_REGION", "us-east-1")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- bedrockProvider := cfg.Providers[provider.InferenceProviderBedrock]
- assert.Equal(t, "us-east-1", bedrockProvider.ExtraParams["region"])
-}
-
-func TestEnvVars_NoAWSCredentials(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.NotContains(t, cfg.Providers, provider.InferenceProviderBedrock)
-}
-
-func TestEnvVars_CustomEnvironmentVariables(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "resolved-anthropic-key")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- if len(cfg.Providers) > 0 {
- if anthropicProvider, exists := cfg.Providers[provider.InferenceProviderAnthropic]; exists {
- assert.Equal(t, "resolved-anthropic-key", anthropicProvider.APIKey)
- }
- }
-}
-
-func TestEnvVars_CombinedEnvironmentVariables(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-anthropic")
- os.Setenv("OPENAI_API_KEY", "test-openai")
- os.Setenv("GOOGLE_GENAI_USE_VERTEXAI", "true")
- os.Setenv("GOOGLE_CLOUD_PROJECT", "test-project")
- os.Setenv("GOOGLE_CLOUD_LOCATION", "us-central1")
- os.Setenv("AWS_ACCESS_KEY_ID", "test-aws-key")
- os.Setenv("AWS_SECRET_ACCESS_KEY", "test-aws-secret")
- os.Setenv("AWS_DEFAULT_REGION", "us-west-1")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- expectedProviders := []provider.InferenceProvider{
- provider.InferenceProviderAnthropic,
- provider.InferenceProviderOpenAI,
- provider.InferenceProviderVertexAI,
- provider.InferenceProviderBedrock,
- }
-
- for _, expectedProvider := range expectedProviders {
- assert.Contains(t, cfg.Providers, expectedProvider)
- }
-}
-
-func TestHasAWSCredentials_AccessKeys(t *testing.T) {
- reset()
-
- os.Setenv("AWS_ACCESS_KEY_ID", "test-key")
- os.Setenv("AWS_SECRET_ACCESS_KEY", "test-secret")
-
- assert.True(t, hasAWSCredentials())
-}
-
-func TestHasAWSCredentials_Profile(t *testing.T) {
- reset()
-
- os.Setenv("AWS_PROFILE", "test-profile")
-
- assert.True(t, hasAWSCredentials())
-}
-
-func TestHasAWSCredentials_DefaultProfile(t *testing.T) {
- reset()
-
- os.Setenv("AWS_DEFAULT_PROFILE", "default")
-
- assert.True(t, hasAWSCredentials())
-}
-
-func TestHasAWSCredentials_Region(t *testing.T) {
- reset()
-
- os.Setenv("AWS_REGION", "us-east-1")
-
- assert.True(t, hasAWSCredentials())
-}
-
-func TestHasAWSCredentials_ContainerCredentials(t *testing.T) {
- reset()
-
- os.Setenv("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI", "/v2/credentials/test")
-
- assert.True(t, hasAWSCredentials())
-}
-
-func TestHasAWSCredentials_NoCredentials(t *testing.T) {
- reset()
-
- assert.False(t, hasAWSCredentials())
-}
-
-func TestProviderMerging_GlobalToBase(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "global-openai-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-3.5-turbo",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "gpt-3.5-turbo",
- Name: "GPT-3.5 Turbo",
- ContextWindow: 4096,
- DefaultMaxTokens: 2048,
- },
- },
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Len(t, cfg.Providers, 1)
-
- openaiProvider := cfg.Providers[provider.InferenceProviderOpenAI]
- assert.Equal(t, "global-openai-key", openaiProvider.APIKey)
- assert.Equal(t, "gpt-4", openaiProvider.DefaultLargeModel)
- assert.Equal(t, "gpt-4o", openaiProvider.DefaultSmallModel)
- assert.GreaterOrEqual(t, len(openaiProvider.Models), 2)
-}
-
-func TestProviderMerging_LocalToBase(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- localConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderAnthropic: {
- ID: provider.InferenceProviderAnthropic,
- APIKey: "local-anthropic-key",
- ProviderType: provider.TypeAnthropic,
- DefaultLargeModel: "claude-3-opus",
- DefaultSmallModel: "claude-3-haiku",
- Models: []Model{
- {
- ID: "claude-3-opus",
- Name: "Claude 3 Opus",
- ContextWindow: 200000,
- DefaultMaxTokens: 4096,
- CostPer1MIn: 15.0,
- CostPer1MOut: 75.0,
- },
- {
- ID: "claude-3-haiku",
- Name: "Claude 3 Haiku",
- ContextWindow: 200000,
- DefaultMaxTokens: 4096,
- CostPer1MIn: 0.25,
- CostPer1MOut: 1.25,
- },
- },
- },
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err := json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Len(t, cfg.Providers, 1)
-
- anthropicProvider := cfg.Providers[provider.InferenceProviderAnthropic]
- assert.Equal(t, "local-anthropic-key", anthropicProvider.APIKey)
- assert.Equal(t, "claude-3-opus", anthropicProvider.DefaultLargeModel)
- assert.Equal(t, "claude-3-5-haiku-20241022", anthropicProvider.DefaultSmallModel)
- assert.GreaterOrEqual(t, len(anthropicProvider.Models), 2)
-}
-
-func TestProviderMerging_ConflictingSettings(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "global-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-3.5-turbo",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "gpt-3.5-turbo",
- Name: "GPT-3.5 Turbo",
- ContextWindow: 4096,
- DefaultMaxTokens: 2048,
- },
- {
- ID: "gpt-4-turbo",
- Name: "GPT-4 Turbo",
- ContextWindow: 128000,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- // Create local config that overrides
- localConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- APIKey: "local-key",
- DefaultLargeModel: "gpt-4-turbo",
- },
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- openaiProvider := cfg.Providers[provider.InferenceProviderOpenAI]
- assert.Equal(t, "local-key", openaiProvider.APIKey)
- assert.Equal(t, "gpt-4-turbo", openaiProvider.DefaultLargeModel)
- assert.False(t, openaiProvider.Disabled)
- assert.Equal(t, "gpt-4o", openaiProvider.DefaultSmallModel)
-}
-
-func TestProviderMerging_CustomVsKnownProviders(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- customProviderID := provider.InferenceProvider("custom-provider")
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "openai-key",
- BaseURL: "should-not-override",
- ProviderType: provider.TypeAnthropic,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-3.5-turbo",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "gpt-3.5-turbo",
- Name: "GPT-3.5 Turbo",
- ContextWindow: 4096,
- DefaultMaxTokens: 2048,
- },
- },
- },
- customProviderID: {
- ID: customProviderID,
- APIKey: "custom-key",
- BaseURL: "https://custom.api.com",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "custom-large",
- DefaultSmallModel: "custom-small",
- Models: []Model{
- {
- ID: "custom-large",
- Name: "Custom Large",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "custom-small",
- Name: "Custom Small",
- ContextWindow: 4096,
- DefaultMaxTokens: 2048,
- },
- },
- },
- },
- }
-
- localConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- BaseURL: "https://should-not-change.com",
- ProviderType: provider.TypeGemini, // Should not change
- },
- customProviderID: {
- BaseURL: "https://updated-custom.api.com",
- ProviderType: provider.TypeOpenAI,
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- openaiProvider := cfg.Providers[provider.InferenceProviderOpenAI]
- assert.NotEqual(t, "https://should-not-change.com", openaiProvider.BaseURL)
- assert.NotEqual(t, provider.TypeGemini, openaiProvider.ProviderType)
-
- customProvider := cfg.Providers[customProviderID]
- assert.Equal(t, "custom-key", customProvider.APIKey)
- assert.Equal(t, "https://updated-custom.api.com", customProvider.BaseURL)
- assert.Equal(t, provider.TypeOpenAI, customProvider.ProviderType)
-}
-
-func TestProviderValidation_CustomProviderMissingBaseURL(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- customProviderID := provider.InferenceProvider("custom-provider")
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- customProviderID: {
- ID: customProviderID,
- APIKey: "custom-key",
- ProviderType: provider.TypeOpenAI,
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.NotContains(t, cfg.Providers, customProviderID)
-}
-
-func TestProviderValidation_CustomProviderMissingAPIKey(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- customProviderID := provider.InferenceProvider("custom-provider")
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- customProviderID: {
- ID: customProviderID,
- BaseURL: "https://custom.api.com",
- ProviderType: provider.TypeOpenAI,
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.NotContains(t, cfg.Providers, customProviderID)
-}
-
-func TestProviderValidation_CustomProviderInvalidType(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- customProviderID := provider.InferenceProvider("custom-provider")
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- customProviderID: {
- ID: customProviderID,
- APIKey: "custom-key",
- BaseURL: "https://custom.api.com",
- ProviderType: provider.Type("invalid-type"),
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.NotContains(t, cfg.Providers, customProviderID)
-}
-
-func TestProviderValidation_KnownProviderValid(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "openai-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-3.5-turbo",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "gpt-3.5-turbo",
- Name: "GPT-3.5 Turbo",
- ContextWindow: 4096,
- DefaultMaxTokens: 2048,
- },
- },
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderOpenAI)
-}
-
-func TestProviderValidation_DisabledProvider(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "openai-key",
- ProviderType: provider.TypeOpenAI,
- Disabled: true,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-3.5-turbo",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "gpt-3.5-turbo",
- Name: "GPT-3.5 Turbo",
- ContextWindow: 4096,
- DefaultMaxTokens: 2048,
- },
- },
- },
- provider.InferenceProviderAnthropic: {
- ID: provider.InferenceProviderAnthropic,
- APIKey: "anthropic-key",
- ProviderType: provider.TypeAnthropic,
- Disabled: false, // This one is enabled
- DefaultLargeModel: "claude-3-opus",
- DefaultSmallModel: "claude-3-haiku",
- Models: []Model{
- {
- ID: "claude-3-opus",
- Name: "Claude 3 Opus",
- ContextWindow: 200000,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "claude-3-haiku",
- Name: "Claude 3 Haiku",
- ContextWindow: 200000,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderOpenAI)
- assert.True(t, cfg.Providers[provider.InferenceProviderOpenAI].Disabled)
- assert.Contains(t, cfg.Providers, provider.InferenceProviderAnthropic)
- assert.False(t, cfg.Providers[provider.InferenceProviderAnthropic].Disabled)
-}
-
-func TestProviderModels_AddingNewModels(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "openai-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-4-turbo",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- }
-
- localConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- Models: []Model{
- {
- ID: "gpt-4-turbo",
- Name: "GPT-4 Turbo",
- ContextWindow: 128000,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- openaiProvider := cfg.Providers[provider.InferenceProviderOpenAI]
- assert.GreaterOrEqual(t, len(openaiProvider.Models), 2)
-
- modelIDs := make([]string, len(openaiProvider.Models))
- for i, model := range openaiProvider.Models {
- modelIDs[i] = model.ID
- }
- assert.Contains(t, modelIDs, "gpt-4")
- assert.Contains(t, modelIDs, "gpt-4-turbo")
-}
-
-func TestProviderModels_DuplicateModelHandling(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "openai-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-4",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- }
-
- localConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4 Updated",
- ContextWindow: 16384,
- DefaultMaxTokens: 8192,
- },
- },
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- openaiProvider := cfg.Providers[provider.InferenceProviderOpenAI]
- assert.GreaterOrEqual(t, len(openaiProvider.Models), 1)
-
- // Find the first model that matches our test data
- var testModel *Model
- for _, model := range openaiProvider.Models {
- if model.ID == "gpt-4" {
- testModel = &model
- break
- }
- }
-
- // If gpt-4 not found, use the first available model
- if testModel == nil {
- testModel = &openaiProvider.Models[0]
- }
-
- assert.NotEmpty(t, testModel.ID)
- assert.NotEmpty(t, testModel.Name)
- assert.Greater(t, testModel.ContextWindow, int64(0))
-}
-
-func TestProviderModels_ModelCostAndCapabilities(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "openai-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-4",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- CostPer1MIn: 30.0,
- CostPer1MOut: 60.0,
- CostPer1MInCached: 15.0,
- CostPer1MOutCached: 30.0,
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- CanReason: true,
- ReasoningEffort: "medium",
- SupportsImages: true,
- },
- },
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- openaiProvider := cfg.Providers[provider.InferenceProviderOpenAI]
- require.GreaterOrEqual(t, len(openaiProvider.Models), 1)
-
- // Find the test model or use the first one
- var testModel *Model
- for _, model := range openaiProvider.Models {
- if model.ID == "gpt-4" {
- testModel = &model
- break
- }
- }
-
- if testModel == nil {
- testModel = &openaiProvider.Models[0]
- }
-
- // Only test the custom properties if this is actually our test model
- if testModel.ID == "gpt-4" {
- assert.Equal(t, 30.0, testModel.CostPer1MIn)
- assert.Equal(t, 60.0, testModel.CostPer1MOut)
- assert.Equal(t, 15.0, testModel.CostPer1MInCached)
- assert.Equal(t, 30.0, testModel.CostPer1MOutCached)
- assert.True(t, testModel.CanReason)
- assert.Equal(t, "medium", testModel.ReasoningEffort)
- assert.True(t, testModel.SupportsImages)
- }
-}
-
-func TestDefaultAgents_CoderAgent(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Contains(t, cfg.Agents, AgentCoder)
-
- coderAgent := cfg.Agents[AgentCoder]
- assert.Equal(t, AgentCoder, coderAgent.ID)
- assert.Equal(t, "Coder", coderAgent.Name)
- assert.Equal(t, "An agent that helps with executing coding tasks.", coderAgent.Description)
- assert.Equal(t, LargeModel, coderAgent.Model)
- assert.False(t, coderAgent.Disabled)
- assert.Equal(t, cfg.Options.ContextPaths, coderAgent.ContextPaths)
- assert.Nil(t, coderAgent.AllowedTools)
-}
-
-func TestDefaultAgents_TaskAgent(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- assert.Contains(t, cfg.Agents, AgentTask)
-
- taskAgent := cfg.Agents[AgentTask]
- assert.Equal(t, AgentTask, taskAgent.ID)
- assert.Equal(t, "Task", taskAgent.Name)
- assert.Equal(t, "An agent that helps with searching for context and finding implementation details.", taskAgent.Description)
- assert.Equal(t, LargeModel, taskAgent.Model)
- assert.False(t, taskAgent.Disabled)
- assert.Equal(t, cfg.Options.ContextPaths, taskAgent.ContextPaths)
-
- expectedTools := []string{"glob", "grep", "ls", "sourcegraph", "view"}
- assert.Equal(t, expectedTools, taskAgent.AllowedTools)
-
- assert.Equal(t, map[string][]string{}, taskAgent.AllowedMCP)
- assert.Equal(t, []string{}, taskAgent.AllowedLSP)
-}
-
-func TestAgentMerging_CustomAgent(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Agents: map[AgentID]Agent{
- AgentID("custom-agent"): {
- ID: AgentID("custom-agent"),
- Name: "Custom Agent",
- Description: "A custom agent for testing",
- Model: SmallModel,
- AllowedTools: []string{"glob", "grep"},
- AllowedMCP: map[string][]string{"mcp1": {"tool1", "tool2"}},
- AllowedLSP: []string{"typescript", "go"},
- ContextPaths: []string{"custom-context.md"},
- },
- },
- MCP: map[string]MCP{
- "mcp1": {
- Type: MCPStdio,
- Command: "test-mcp-command",
- Args: []string{"--test"},
- },
- },
- LSP: map[string]LSPConfig{
- "typescript": {
- Command: "typescript-language-server",
- Args: []string{"--stdio"},
- },
- "go": {
- Command: "gopls",
- Args: []string{},
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- assert.Contains(t, cfg.Agents, AgentCoder)
- assert.Contains(t, cfg.Agents, AgentTask)
- assert.Contains(t, cfg.Agents, AgentID("custom-agent"))
-
- customAgent := cfg.Agents[AgentID("custom-agent")]
- assert.Equal(t, "Custom Agent", customAgent.Name)
- assert.Equal(t, "A custom agent for testing", customAgent.Description)
- assert.Equal(t, SmallModel, customAgent.Model)
- assert.Equal(t, []string{"glob", "grep"}, customAgent.AllowedTools)
- assert.Equal(t, map[string][]string{"mcp1": {"tool1", "tool2"}}, customAgent.AllowedMCP)
- assert.Equal(t, []string{"typescript", "go"}, customAgent.AllowedLSP)
- expectedContextPaths := append(defaultContextPaths, "custom-context.md")
- assert.Equal(t, expectedContextPaths, customAgent.ContextPaths)
-}
-
-func TestAgentMerging_ModifyDefaultCoderAgent(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Agents: map[AgentID]Agent{
- AgentCoder: {
- Model: SmallModel,
- AllowedMCP: map[string][]string{"mcp1": {"tool1"}},
- AllowedLSP: []string{"typescript"},
- ContextPaths: []string{"coder-specific.md"},
- },
- },
- MCP: map[string]MCP{
- "mcp1": {
- Type: MCPStdio,
- Command: "test-mcp-command",
- Args: []string{"--test"},
- },
- },
- LSP: map[string]LSPConfig{
- "typescript": {
- Command: "typescript-language-server",
- Args: []string{"--stdio"},
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- coderAgent := cfg.Agents[AgentCoder]
- assert.Equal(t, AgentCoder, coderAgent.ID)
- assert.Equal(t, "Coder", coderAgent.Name)
- assert.Equal(t, "An agent that helps with executing coding tasks.", coderAgent.Description)
-
- expectedContextPaths := append(cfg.Options.ContextPaths, "coder-specific.md")
- assert.Equal(t, expectedContextPaths, coderAgent.ContextPaths)
-
- assert.Equal(t, SmallModel, coderAgent.Model)
- assert.Equal(t, map[string][]string{"mcp1": {"tool1"}}, coderAgent.AllowedMCP)
- assert.Equal(t, []string{"typescript"}, coderAgent.AllowedLSP)
-}
-
-func TestAgentMerging_ModifyDefaultTaskAgent(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Agents: map[AgentID]Agent{
- AgentTask: {
- Model: SmallModel,
- AllowedMCP: map[string][]string{"search-mcp": nil},
- AllowedLSP: []string{"python"},
- Name: "Search Agent",
- Description: "Custom search agent",
- Disabled: true,
- AllowedTools: []string{"glob", "grep", "view"},
- },
- },
- MCP: map[string]MCP{
- "search-mcp": {
- Type: MCPStdio,
- Command: "search-mcp-command",
- Args: []string{"--search"},
- },
- },
- LSP: map[string]LSPConfig{
- "python": {
- Command: "pylsp",
- Args: []string{},
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- taskAgent := cfg.Agents[AgentTask]
- assert.Equal(t, "Task", taskAgent.Name)
- assert.Equal(t, "An agent that helps with searching for context and finding implementation details.", taskAgent.Description)
- assert.False(t, taskAgent.Disabled)
- assert.Equal(t, []string{"glob", "grep", "ls", "sourcegraph", "view"}, taskAgent.AllowedTools)
-
- assert.Equal(t, SmallModel, taskAgent.Model)
- assert.Equal(t, map[string][]string{"search-mcp": nil}, taskAgent.AllowedMCP)
- assert.Equal(t, []string{"python"}, taskAgent.AllowedLSP)
-}
-
-func TestAgentMerging_LocalOverridesGlobal(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Agents: map[AgentID]Agent{
- AgentID("test-agent"): {
- ID: AgentID("test-agent"),
- Name: "Global Agent",
- Description: "Global description",
- Model: LargeModel,
- AllowedTools: []string{"glob"},
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- // Create local config that overrides
- localConfig := Config{
- Agents: map[AgentID]Agent{
- AgentID("test-agent"): {
- Name: "Local Agent",
- Description: "Local description",
- Model: SmallModel,
- Disabled: true,
- AllowedTools: []string{"grep", "view"},
- AllowedMCP: map[string][]string{"local-mcp": {"tool1"}},
- },
- },
- MCP: map[string]MCP{
- "local-mcp": {
- Type: MCPStdio,
- Command: "local-mcp-command",
- Args: []string{"--local"},
- },
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- testAgent := cfg.Agents[AgentID("test-agent")]
- assert.Equal(t, "Local Agent", testAgent.Name)
- assert.Equal(t, "Local description", testAgent.Description)
- assert.Equal(t, SmallModel, testAgent.Model)
- assert.True(t, testAgent.Disabled)
- assert.Equal(t, []string{"grep", "view"}, testAgent.AllowedTools)
- assert.Equal(t, map[string][]string{"local-mcp": {"tool1"}}, testAgent.AllowedMCP)
-}
-
-func TestAgentModelTypeAssignment(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Agents: map[AgentID]Agent{
- AgentID("large-agent"): {
- ID: AgentID("large-agent"),
- Name: "Large Model Agent",
- Model: LargeModel,
- },
- AgentID("small-agent"): {
- ID: AgentID("small-agent"),
- Name: "Small Model Agent",
- Model: SmallModel,
- },
- AgentID("default-agent"): {
- ID: AgentID("default-agent"),
- Name: "Default Model Agent",
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- assert.Equal(t, LargeModel, cfg.Agents[AgentID("large-agent")].Model)
- assert.Equal(t, SmallModel, cfg.Agents[AgentID("small-agent")].Model)
- assert.Equal(t, LargeModel, cfg.Agents[AgentID("default-agent")].Model)
-}
-
-func TestAgentContextPathOverrides(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Options: Options{
- ContextPaths: []string{"global-context.md", "shared-context.md"},
- },
- Agents: map[AgentID]Agent{
- AgentID("custom-context-agent"): {
- ID: AgentID("custom-context-agent"),
- Name: "Custom Context Agent",
- ContextPaths: []string{"agent-specific.md", "custom.md"},
- },
- AgentID("default-context-agent"): {
- ID: AgentID("default-context-agent"),
- Name: "Default Context Agent",
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- customAgent := cfg.Agents[AgentID("custom-context-agent")]
- expectedCustomPaths := append(defaultContextPaths, "global-context.md", "shared-context.md", "agent-specific.md", "custom.md")
- assert.Equal(t, expectedCustomPaths, customAgent.ContextPaths)
-
- defaultAgent := cfg.Agents[AgentID("default-context-agent")]
- expectedContextPaths := append(defaultContextPaths, "global-context.md", "shared-context.md")
- assert.Equal(t, expectedContextPaths, defaultAgent.ContextPaths)
-
- coderAgent := cfg.Agents[AgentCoder]
- assert.Equal(t, expectedContextPaths, coderAgent.ContextPaths)
-}
-
-func TestOptionsMerging_ContextPaths(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Options: Options{
- ContextPaths: []string{"global1.md", "global2.md"},
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- localConfig := Config{
- Options: Options{
- ContextPaths: []string{"local1.md", "local2.md"},
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- expectedContextPaths := append(defaultContextPaths, "global1.md", "global2.md", "local1.md", "local2.md")
- assert.Equal(t, expectedContextPaths, cfg.Options.ContextPaths)
-}
-
-func TestOptionsMerging_TUIOptions(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Options: Options{
- TUI: TUIOptions{
- CompactMode: false,
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- localConfig := Config{
- Options: Options{
- TUI: TUIOptions{
- CompactMode: true,
- },
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- assert.True(t, cfg.Options.TUI.CompactMode)
-}
-
-func TestOptionsMerging_DebugFlags(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Options: Options{
- Debug: false,
- DebugLSP: false,
- DisableAutoSummarize: false,
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- localConfig := Config{
- Options: Options{
- DebugLSP: true,
- DisableAutoSummarize: true,
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- assert.False(t, cfg.Options.Debug)
- assert.True(t, cfg.Options.DebugLSP)
- assert.True(t, cfg.Options.DisableAutoSummarize)
-}
-
-func TestOptionsMerging_DataDirectory(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Options: Options{
- DataDirectory: "global-data",
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- localConfig := Config{
- Options: Options{
- DataDirectory: "local-data",
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- assert.Equal(t, "local-data", cfg.Options.DataDirectory)
-}
-
-func TestOptionsMerging_DefaultValues(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- assert.Equal(t, defaultDataDirectory, cfg.Options.DataDirectory)
- assert.Equal(t, defaultContextPaths, cfg.Options.ContextPaths)
- assert.False(t, cfg.Options.TUI.CompactMode)
- assert.False(t, cfg.Options.Debug)
- assert.False(t, cfg.Options.DebugLSP)
- assert.False(t, cfg.Options.DisableAutoSummarize)
-}
-
-func TestOptionsMerging_DebugFlagFromInit(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Options: Options{
- Debug: false,
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- cfg, err := Init(cwdDir, true)
-
- require.NoError(t, err)
-
- // Debug flag from Init should take precedence
- assert.True(t, cfg.Options.Debug)
-}
-
-func TestOptionsMerging_ComplexScenario(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- // Set up a provider
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- // Create global config with various options
- globalConfig := Config{
- Options: Options{
- ContextPaths: []string{"global-context.md"},
- DataDirectory: "global-data",
- Debug: false,
- DebugLSP: false,
- DisableAutoSummarize: false,
- TUI: TUIOptions{
- CompactMode: false,
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- // Create local config that partially overrides
- localConfig := Config{
- Options: Options{
- ContextPaths: []string{"local-context.md"},
- DebugLSP: true, // Override
- DisableAutoSummarize: true, // Override
- TUI: TUIOptions{
- CompactMode: true, // Override
- },
- // DataDirectory and Debug not specified - should keep global values
- },
- }
-
- localConfigPath := filepath.Join(cwdDir, "crush.json")
- data, err = json.Marshal(localConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(localConfigPath, data, 0o644))
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
-
- // Check merged results
- expectedContextPaths := append(defaultContextPaths, "global-context.md", "local-context.md")
- assert.Equal(t, expectedContextPaths, cfg.Options.ContextPaths)
- assert.Equal(t, "global-data", cfg.Options.DataDirectory) // From global
- assert.False(t, cfg.Options.Debug) // From global
- assert.True(t, cfg.Options.DebugLSP) // From local
- assert.True(t, cfg.Options.DisableAutoSummarize) // From local
- assert.True(t, cfg.Options.TUI.CompactMode) // From local
-}
-
-// Model Selection Tests
-
-func TestModelSelection_PreferredModelSelection(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- // Set up multiple providers to test selection logic
- os.Setenv("ANTHROPIC_API_KEY", "test-anthropic-key")
- os.Setenv("OPENAI_API_KEY", "test-openai-key")
-
- cfg, err := Init(cwdDir, false)
-
- require.NoError(t, err)
- require.Len(t, cfg.Providers, 2)
-
- // Should have preferred models set
- assert.NotEmpty(t, cfg.Models.Large.ModelID)
- assert.NotEmpty(t, cfg.Models.Large.Provider)
- assert.NotEmpty(t, cfg.Models.Small.ModelID)
- assert.NotEmpty(t, cfg.Models.Small.Provider)
-
- // Both should use the same provider (first available)
- assert.Equal(t, cfg.Models.Large.Provider, cfg.Models.Small.Provider)
-}
-
-func TestValidation_InvalidModelReference(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- globalConfig := Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "test-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "non-existent-model",
- DefaultSmallModel: "gpt-3.5-turbo",
- Models: []Model{
- {
- ID: "gpt-3.5-turbo",
- Name: "GPT-3.5 Turbo",
- ContextWindow: 4096,
- DefaultMaxTokens: 2048,
- },
- },
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- _, err = Init(cwdDir, false)
- assert.Error(t, err)
-}
-
-func TestValidation_InvalidAgentModelType(t *testing.T) {
- reset()
- testConfigDir = t.TempDir()
- cwdDir := t.TempDir()
-
- os.Setenv("ANTHROPIC_API_KEY", "test-key")
-
- globalConfig := Config{
- Agents: map[AgentID]Agent{
- AgentID("invalid-agent"): {
- ID: AgentID("invalid-agent"),
- Name: "Invalid Agent",
- Model: ModelType("invalid"),
- },
- },
- }
-
- configPath := filepath.Join(testConfigDir, "crush.json")
- require.NoError(t, os.MkdirAll(filepath.Dir(configPath), 0o755))
- data, err := json.Marshal(globalConfig)
- require.NoError(t, err)
- require.NoError(t, os.WriteFile(configPath, data, 0o644))
-
- _, err = Init(cwdDir, false)
- assert.Error(t, err)
-}
@@ -1,71 +0,0 @@
-package config
-
-import (
- "fmt"
- "os"
- "path/filepath"
- "runtime"
-)
-
-var testConfigDir string
-
-func baseConfigPath() string {
- if testConfigDir != "" {
- return testConfigDir
- }
-
- xdgConfigHome := os.Getenv("XDG_CONFIG_HOME")
- if xdgConfigHome != "" {
- return filepath.Join(xdgConfigHome, "crush")
- }
-
- // return the path to the main config directory
- // for windows, it should be in `%LOCALAPPDATA%/crush/`
- // for linux and macOS, it should be in `$HOME/.config/crush/`
- if runtime.GOOS == "windows" {
- localAppData := os.Getenv("LOCALAPPDATA")
- if localAppData == "" {
- localAppData = filepath.Join(os.Getenv("USERPROFILE"), "AppData", "Local")
- }
- return filepath.Join(localAppData, appName)
- }
-
- return filepath.Join(os.Getenv("HOME"), ".config", appName)
-}
-
-func baseDataPath() string {
- if testConfigDir != "" {
- return testConfigDir
- }
-
- xdgDataHome := os.Getenv("XDG_DATA_HOME")
- if xdgDataHome != "" {
- return filepath.Join(xdgDataHome, appName)
- }
-
- // return the path to the main data directory
- // for windows, it should be in `%LOCALAPPDATA%/crush/`
- // for linux and macOS, it should be in `$HOME/.local/share/crush/`
- if runtime.GOOS == "windows" {
- localAppData := os.Getenv("LOCALAPPDATA")
- if localAppData == "" {
- localAppData = filepath.Join(os.Getenv("USERPROFILE"), "AppData", "Local")
- }
- return filepath.Join(localAppData, appName)
- }
-
- return filepath.Join(os.Getenv("HOME"), ".local", "share", appName)
-}
-
-func ConfigPath() string {
- return filepath.Join(baseConfigPath(), fmt.Sprintf("%s.json", appName))
-}
-
-func CrushInitialized() bool {
- cfgPath := ConfigPath()
- if _, err := os.Stat(cfgPath); os.IsNotExist(err) {
- // config file does not exist, so Crush is not initialized
- return false
- }
- return true
-}
@@ -5,27 +5,49 @@ import (
"os"
"path/filepath"
"strings"
+ "sync"
+ "sync/atomic"
)
const (
- // InitFlagFilename is the name of the file that indicates whether the project has been initialized
InitFlagFilename = "init"
)
-// ProjectInitFlag represents the initialization status for a project directory
type ProjectInitFlag struct {
Initialized bool `json:"initialized"`
}
-// ProjectNeedsInitialization checks if the current project needs initialization
+// TODO: we need to remove the global config instance keeping it now just until everything is migrated
+var (
+ instance atomic.Pointer[Config]
+ cwd string
+ once sync.Once // Ensures the initialization happens only once
+)
+
+func Init(workingDir string, debug bool) (*Config, error) {
+ var err error
+ once.Do(func() {
+ cwd = workingDir
+ var cfg *Config
+ cfg, err = Load(cwd, debug)
+ instance.Store(cfg)
+ })
+
+ return instance.Load(), err
+}
+
+func Get() *Config {
+ return instance.Load()
+}
+
func ProjectNeedsInitialization() (bool, error) {
- if instance == nil {
+ cfg := Get()
+ if cfg == nil {
return false, fmt.Errorf("config not loaded")
}
- flagFilePath := filepath.Join(instance.Options.DataDirectory, InitFlagFilename)
+ flagFilePath := filepath.Join(cfg.Options.DataDirectory, InitFlagFilename)
- // Check if the flag file exists
_, err := os.Stat(flagFilePath)
if err == nil {
return false, nil
@@ -35,8 +57,7 @@ func ProjectNeedsInitialization() (bool, error) {
return false, fmt.Errorf("failed to check init flag file: %w", err)
}
- // Check if any variation of CRUSH.md already exists in working directory
- crushExists, err := crushMdExists(WorkingDirectory())
+ crushExists, err := crushMdExists(cfg.WorkingDir())
if err != nil {
return false, fmt.Errorf("failed to check for CRUSH.md files: %w", err)
}
@@ -47,7 +68,6 @@ func ProjectNeedsInitialization() (bool, error) {
return true, nil
}
-// crushMdExists checks if any case variation of crush.md exists in the directory
func crushMdExists(dir string) (bool, error) {
entries, err := os.ReadDir(dir)
if err != nil {
@@ -68,12 +88,12 @@ func crushMdExists(dir string) (bool, error) {
return false, nil
}
-// MarkProjectInitialized marks the current project as initialized
func MarkProjectInitialized() error {
- if instance == nil {
+ cfg := Get()
+ if cfg == nil {
return fmt.Errorf("config not loaded")
}
- flagFilePath := filepath.Join(instance.Options.DataDirectory, InitFlagFilename)
+ flagFilePath := filepath.Join(cfg.Options.DataDirectory, InitFlagFilename)
file, err := os.Create(flagFilePath)
if err != nil {
@@ -0,0 +1,539 @@
+package config
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"io"
+	"os"
+	"path/filepath"
+	"runtime"
+	"slices"
+	"strings"
+
+	"github.com/charmbracelet/crush/internal/env"
+	"github.com/charmbracelet/crush/internal/fur/client"
+	"github.com/charmbracelet/crush/internal/fur/provider"
+	"github.com/charmbracelet/crush/internal/log"
+	"golang.org/x/exp/slog"
+)
+
+// LoadReader config via io.Reader.
+func LoadReader(fd io.Reader) (*Config, error) {
+ data, err := io.ReadAll(fd)
+ if err != nil {
+ return nil, err
+ }
+
+ var config Config
+ err = json.Unmarshal(data, &config)
+ if err != nil {
+ return nil, err
+ }
+ return &config, err
+}
+
+// Load reads the configuration from the global and per-project paths
+// (later paths override earlier ones), applies defaults, initializes
+// logging, then resolves providers and the selected models.
+func Load(workingDir string, debug bool) (*Config, error) {
+	// uses default config paths
+	configPaths := []string{
+		globalConfig(),
+		globalConfigData(),
+		filepath.Join(workingDir, fmt.Sprintf("%s.json", appName)),
+		filepath.Join(workingDir, fmt.Sprintf(".%s.json", appName)),
+	}
+	cfg, err := loadFromConfigPaths(configPaths)
+	if err != nil {
+		return nil, fmt.Errorf("failed to load config from paths %v: %w", configPaths, err)
+	}
+
+	cfg.setDefaults(workingDir)
+
+	if debug {
+		cfg.Options.Debug = true
+	}
+
+	// Init logs
+	log.Init(
+		filepath.Join(cfg.Options.DataDirectory, "logs", fmt.Sprintf("%s.log", appName)),
+		cfg.Options.Debug,
+	)
+
+	// Load known providers, this loads the config from fur.
+	// The empty-list case is reported separately: wrapping a nil error
+	// with %w would otherwise yield a confusing "%!w(<nil>)" message.
+	providers, err := LoadProviders(client.New())
+	if err != nil {
+		return nil, fmt.Errorf("failed to load providers: %w", err)
+	}
+	if len(providers) == 0 {
+		return nil, fmt.Errorf("failed to load providers: no providers available")
+	}
+
+	env := env.New()
+	// Configure providers
+	valueResolver := NewShellVariableResolver(env)
+	cfg.resolver = valueResolver
+	if err := cfg.configureProviders(env, valueResolver, providers); err != nil {
+		return nil, fmt.Errorf("failed to configure providers: %w", err)
+	}
+
+	if !cfg.IsConfigured() {
+		slog.Warn("No providers configured")
+		return cfg, nil
+	}
+
+	if err := cfg.configureSelectedModels(providers); err != nil {
+		return nil, fmt.Errorf("failed to configure selected models: %w", err)
+	}
+
+	// TODO: remove the agents concept from the config
+	agents := map[string]Agent{
+		"coder": {
+			ID:           "coder",
+			Name:         "Coder",
+			Description:  "An agent that helps with executing coding tasks.",
+			Model:        SelectedModelTypeLarge,
+			ContextPaths: cfg.Options.ContextPaths,
+			// All tools allowed
+		},
+		"task": {
+			ID:           "task",
+			Name:         "Task",
+			Description:  "An agent that helps with searching for context and finding implementation details.",
+			Model:        SelectedModelTypeLarge,
+			ContextPaths: cfg.Options.ContextPaths,
+			AllowedTools: []string{
+				"glob",
+				"grep",
+				"ls",
+				"sourcegraph",
+				"view",
+			},
+			// NO MCPs or LSPs by default
+			AllowedMCP: map[string][]string{},
+			AllowedLSP: []string{},
+		},
+	}
+	cfg.Agents = agents
+
+	return cfg, nil
+}
+
+// configureProviders reconciles the user-supplied provider configs with the
+// catalog of known providers, producing the final cfg.Providers map.
+//
+// Known providers may be partially overridden by user config (base URL, API
+// key, extra models); disabled or credential-less ones are dropped. Custom
+// (unknown) providers are validated separately: they must be of an
+// OpenAI-compatible type and carry an API key, a base URL, and at least one
+// model, otherwise they are removed from the map.
+func (cfg *Config) configureProviders(env env.Env, resolver VariableResolver, knownProviders []provider.Provider) error {
+	knownProviderNames := make(map[string]bool)
+	for _, p := range knownProviders {
+		knownProviderNames[string(p.ID)] = true
+		config, configExists := cfg.Providers[string(p.ID)]
+		// if the user configured a known provider we need to allow it to override a couple of parameters
+		if configExists {
+			if config.Disable {
+				slog.Debug("Skipping provider due to disable flag", "provider", p.ID)
+				delete(cfg.Providers, string(p.ID))
+				continue
+			}
+			if config.BaseURL != "" {
+				p.APIEndpoint = config.BaseURL
+			}
+			if config.APIKey != "" {
+				p.APIKey = config.APIKey
+			}
+			if len(config.Models) > 0 {
+				// Merge model lists, de-duplicating by ID; user-configured
+				// models take precedence over the catalog's.
+				models := []provider.Model{}
+				seen := make(map[string]bool)
+
+				for _, model := range config.Models {
+					if seen[model.ID] {
+						continue
+					}
+					seen[model.ID] = true
+					if model.Model == "" {
+						model.Model = model.ID
+					}
+					models = append(models, model)
+				}
+				for _, model := range p.Models {
+					if seen[model.ID] {
+						continue
+					}
+					seen[model.ID] = true
+					if model.Model == "" {
+						model.Model = model.ID
+					}
+					models = append(models, model)
+				}
+
+				p.Models = models
+			}
+		}
+		// When no user config exists, `config` is the zero value, so
+		// Disable is false and ExtraHeaders is nil here.
+		prepared := ProviderConfig{
+			ID:           string(p.ID),
+			BaseURL:      p.APIEndpoint,
+			APIKey:       p.APIKey,
+			Type:         p.Type,
+			Disable:      config.Disable,
+			ExtraHeaders: config.ExtraHeaders,
+			ExtraParams:  make(map[string]string),
+			Models:       p.Models,
+		}
+
+		switch p.ID {
+		// Handle specific providers that require additional configuration
+		case provider.InferenceProviderVertexAI:
+			if !hasVertexCredentials(env) {
+				if configExists {
+					slog.Warn("Skipping Vertex AI provider due to missing credentials")
+					delete(cfg.Providers, string(p.ID))
+				}
+				continue
+			}
+			prepared.ExtraParams["project"] = env.Get("GOOGLE_CLOUD_PROJECT")
+			prepared.ExtraParams["location"] = env.Get("GOOGLE_CLOUD_LOCATION")
+		case provider.InferenceProviderBedrock:
+			if !hasAWSCredentials(env) {
+				if configExists {
+					slog.Warn("Skipping Bedrock provider due to missing AWS credentials")
+					delete(cfg.Providers, string(p.ID))
+				}
+				continue
+			}
+			for _, model := range p.Models {
+				if !strings.HasPrefix(model.ID, "anthropic.") {
+					return fmt.Errorf("bedrock provider only supports anthropic models for now, found: %s", model.ID)
+				}
+			}
+		default:
+			// if the provider api or endpoint are missing we skip them
+			v, err := resolver.ResolveValue(p.APIKey)
+			if v == "" || err != nil {
+				if configExists {
+					slog.Warn("Skipping provider due to missing API key", "provider", p.ID)
+					delete(cfg.Providers, string(p.ID))
+				}
+				continue
+			}
+		}
+		cfg.Providers[string(p.ID)] = prepared
+	}
+
+	// validate the custom providers
+	// (deleting and overwriting existing keys while ranging a map is safe)
+	for id, providerConfig := range cfg.Providers {
+		if knownProviderNames[id] {
+			continue
+		}
+
+		// Make sure the provider ID is set
+		providerConfig.ID = id
+		// default to OpenAI if not set
+		if providerConfig.Type == "" {
+			providerConfig.Type = provider.TypeOpenAI
+		}
+
+		if providerConfig.Disable {
+			slog.Debug("Skipping custom provider due to disable flag", "provider", id)
+			delete(cfg.Providers, id)
+			continue
+		}
+		if providerConfig.APIKey == "" {
+			slog.Warn("Skipping custom provider due to missing API key", "provider", id)
+			delete(cfg.Providers, id)
+			continue
+		}
+		if providerConfig.BaseURL == "" {
+			slog.Warn("Skipping custom provider due to missing API endpoint", "provider", id)
+			delete(cfg.Providers, id)
+			continue
+		}
+		if len(providerConfig.Models) == 0 {
+			slog.Warn("Skipping custom provider because the provider has no models", "provider", id)
+			delete(cfg.Providers, id)
+			continue
+		}
+		if providerConfig.Type != provider.TypeOpenAI {
+			slog.Warn("Skipping custom provider because the provider type is not supported", "provider", id, "type", providerConfig.Type)
+			delete(cfg.Providers, id)
+			continue
+		}
+
+		// Resolution is only used for validation here; the stored config
+		// keeps the unresolved placeholder (e.g. "$OPENAI_API_KEY").
+		apiKey, err := resolver.ResolveValue(providerConfig.APIKey)
+		if apiKey == "" || err != nil {
+			slog.Warn("Skipping custom provider due to missing API key", "provider", id, "error", err)
+			delete(cfg.Providers, id)
+			continue
+		}
+		baseURL, err := resolver.ResolveValue(providerConfig.BaseURL)
+		if baseURL == "" || err != nil {
+			slog.Warn("Skipping custom provider due to missing API endpoint", "provider", id, "error", err)
+			delete(cfg.Providers, id)
+			continue
+		}
+
+		cfg.Providers[id] = providerConfig
+	}
+	return nil
+}
+
+// setDefaults fills in zero-valued configuration sections and records the
+// working directory. It is idempotent and safe to call on a freshly
+// unmarshaled (or empty) Config.
+func (cfg *Config) setDefaults(workingDir string) {
+	cfg.workingDir = workingDir
+
+	if cfg.Options == nil {
+		cfg.Options = &Options{}
+	}
+	opts := cfg.Options
+	if opts.TUI == nil {
+		opts.TUI = &TUIOptions{}
+	}
+	if opts.ContextPaths == nil {
+		opts.ContextPaths = []string{}
+	}
+	if opts.DataDirectory == "" {
+		opts.DataDirectory = filepath.Join(workingDir, defaultDataDirectory)
+	}
+
+	if cfg.Providers == nil {
+		cfg.Providers = make(map[string]ProviderConfig)
+	}
+	if cfg.Models == nil {
+		cfg.Models = make(map[SelectedModelType]SelectedModel)
+	}
+	if cfg.MCP == nil {
+		cfg.MCP = make(map[string]MCPConfig)
+	}
+	if cfg.LSP == nil {
+		cfg.LSP = make(map[string]LSPConfig)
+	}
+
+	// Prepend the built-in context paths, then sort and de-duplicate the
+	// combined list.
+	opts.ContextPaths = append(defaultContextPaths, opts.ContextPaths...)
+	slices.Sort(opts.ContextPaths)
+	opts.ContextPaths = slices.Compact(opts.ContextPaths)
+}
+
+// defaultModelSelection picks default large and small models. It prefers the
+// first known provider (in catalog order) that is configured and enabled;
+// failing that, it falls back to the alphabetically-first enabled provider
+// and uses its first model for both selections.
+func (cfg *Config) defaultModelSelection(knownProviders []provider.Provider) (largeModel SelectedModel, smallModel SelectedModel, err error) {
+	if len(knownProviders) == 0 && len(cfg.Providers) == 0 {
+		err = fmt.Errorf("no providers configured, please configure at least one provider")
+		return
+	}
+
+	// Use the first provider enabled based on the known providers order
+	// if no provider found that is known use the first provider configured
+	for _, p := range knownProviders {
+		providerConfig, ok := cfg.Providers[string(p.ID)]
+		if !ok || providerConfig.Disable {
+			continue
+		}
+		defaultLargeModel := cfg.GetModel(string(p.ID), p.DefaultLargeModelID)
+		if defaultLargeModel == nil {
+			err = fmt.Errorf("default large model %s not found for provider %s", p.DefaultLargeModelID, p.ID)
+			return
+		}
+		largeModel = SelectedModel{
+			Provider:        string(p.ID),
+			Model:           defaultLargeModel.ID,
+			MaxTokens:       defaultLargeModel.DefaultMaxTokens,
+			ReasoningEffort: defaultLargeModel.DefaultReasoningEffort,
+		}
+
+		defaultSmallModel := cfg.GetModel(string(p.ID), p.DefaultSmallModelID)
+		if defaultSmallModel == nil {
+			// Must return here: falling through would dereference the nil
+			// model below.
+			err = fmt.Errorf("default small model %s not found for provider %s", p.DefaultSmallModelID, p.ID)
+			return
+		}
+		smallModel = SelectedModel{
+			Provider:        string(p.ID),
+			Model:           defaultSmallModel.ID,
+			MaxTokens:       defaultSmallModel.DefaultMaxTokens,
+			ReasoningEffort: defaultSmallModel.DefaultReasoningEffort,
+		}
+		return
+	}
+
+	enabledProviders := cfg.EnabledProviders()
+	slices.SortFunc(enabledProviders, func(a, b ProviderConfig) int {
+		return strings.Compare(a.ID, b.ID)
+	})
+
+	if len(enabledProviders) == 0 {
+		err = fmt.Errorf("no providers configured, please configure at least one provider")
+		return
+	}
+
+	providerConfig := enabledProviders[0]
+	if len(providerConfig.Models) == 0 {
+		err = fmt.Errorf("provider %s has no models configured", providerConfig.ID)
+		return
+	}
+	// Both selections use the provider's first model; guard against a nil
+	// lookup before dereferencing.
+	defaultModel := cfg.GetModel(providerConfig.ID, providerConfig.Models[0].ID)
+	if defaultModel == nil {
+		err = fmt.Errorf("model %s not found for provider %s", providerConfig.Models[0].ID, providerConfig.ID)
+		return
+	}
+	largeModel = SelectedModel{
+		Provider:  providerConfig.ID,
+		Model:     defaultModel.ID,
+		MaxTokens: defaultModel.DefaultMaxTokens,
+	}
+	smallModel = SelectedModel{
+		Provider:  providerConfig.ID,
+		Model:     defaultModel.ID,
+		MaxTokens: defaultModel.DefaultMaxTokens,
+	}
+	return
+}
+
+// configureSelectedModels computes the final large/small model selections by
+// layering any user-configured overrides on top of the defaults from
+// defaultModelSelection, then validates both against the provider catalog.
+func (cfg *Config) configureSelectedModels(knownProviders []provider.Provider) error {
+	large, small, err := cfg.defaultModelSelection(knownProviders)
+	if err != nil {
+		return fmt.Errorf("failed to select default models: %w", err)
+	}
+
+	largeModelSelected, largeModelConfigured := cfg.Models[SelectedModelTypeLarge]
+	if largeModelConfigured {
+		if largeModelSelected.Model != "" {
+			large.Model = largeModelSelected.Model
+		}
+		if largeModelSelected.Provider != "" {
+			large.Provider = largeModelSelected.Provider
+		}
+		model := cfg.GetModel(large.Provider, large.Model)
+		if model == nil {
+			return fmt.Errorf("large model %s not found for provider %s", large.Model, large.Provider)
+		}
+		if largeModelSelected.MaxTokens > 0 {
+			large.MaxTokens = largeModelSelected.MaxTokens
+		} else {
+			large.MaxTokens = model.DefaultMaxTokens
+		}
+		if largeModelSelected.ReasoningEffort != "" {
+			large.ReasoningEffort = largeModelSelected.ReasoningEffort
+		}
+		large.Think = largeModelSelected.Think
+	}
+	smallModelSelected, smallModelConfigured := cfg.Models[SelectedModelTypeSmall]
+	if smallModelConfigured {
+		if smallModelSelected.Model != "" {
+			small.Model = smallModelSelected.Model
+		}
+		if smallModelSelected.Provider != "" {
+			small.Provider = smallModelSelected.Provider
+		}
+
+		model := cfg.GetModel(small.Provider, small.Model)
+		if model == nil {
+			// Was previously reported as "large model" with the large
+			// selection's values, which misattributed the failure.
+			return fmt.Errorf("small model %s not found for provider %s", small.Model, small.Provider)
+		}
+		if smallModelSelected.MaxTokens > 0 {
+			small.MaxTokens = smallModelSelected.MaxTokens
+		} else {
+			small.MaxTokens = model.DefaultMaxTokens
+		}
+		// NOTE(review): unlike the large model above, this overwrites the
+		// default even when the configured value is empty — confirm intended.
+		small.ReasoningEffort = smallModelSelected.ReasoningEffort
+		small.Think = smallModelSelected.Think
+	}
+
+	// validate the selected models
+	largeModel := cfg.GetModel(large.Provider, large.Model)
+	if largeModel == nil {
+		return fmt.Errorf("large model %s not found for provider %s", large.Model, large.Provider)
+	}
+	smallModel := cfg.GetModel(small.Provider, small.Model)
+	if smallModel == nil {
+		return fmt.Errorf("small model %s not found for provider %s", small.Model, small.Provider)
+	}
+	cfg.Models[SelectedModelTypeLarge] = large
+	cfg.Models[SelectedModelTypeSmall] = small
+	return nil
+}
+
+// loadFromConfigPaths reads every existing config file in configPaths (in
+// order, later files override earlier ones) and merges them into a Config.
+// Missing files are skipped. Files are read eagerly so each one is closed
+// immediately, instead of piling up deferred closes (and open descriptors)
+// until the merge completes.
+func loadFromConfigPaths(configPaths []string) (*Config, error) {
+	var configs []io.Reader
+
+	for _, path := range configPaths {
+		data, err := os.ReadFile(path)
+		if err != nil {
+			if os.IsNotExist(err) {
+				continue
+			}
+			return nil, fmt.Errorf("failed to read config file %s: %w", path, err)
+		}
+		configs = append(configs, bytes.NewReader(data))
+	}
+
+	return loadFromReaders(configs)
+}
+
+// loadFromReaders merges the given configuration documents and decodes the
+// result. With no readers it yields an empty Config.
+func loadFromReaders(readers []io.Reader) (*Config, error) {
+	if len(readers) == 0 {
+		return &Config{}, nil
+	}
+
+	merged, err := Merge(readers)
+	if err != nil {
+		return nil, fmt.Errorf("failed to merge configuration readers: %w", err)
+	}
+	return LoadReader(merged)
+}
+
+// hasVertexCredentials reports whether the environment opts into Vertex AI
+// and supplies both the required project and location.
+func hasVertexCredentials(env env.Env) bool {
+	return env.Get("GOOGLE_GENAI_USE_VERTEXAI") == "true" &&
+		env.Get("GOOGLE_CLOUD_PROJECT") != "" &&
+		env.Get("GOOGLE_CLOUD_LOCATION") != ""
+}
+
+// hasAWSCredentials reports whether any of the usual AWS credential sources
+// appear to be present: a static key pair, a named profile, a region, or
+// container credential endpoints.
+func hasAWSCredentials(env env.Env) bool {
+	if env.Get("AWS_ACCESS_KEY_ID") != "" && env.Get("AWS_SECRET_ACCESS_KEY") != "" {
+		return true
+	}
+
+	// Any one of these on its own is treated as sufficient evidence.
+	singles := []string{
+		"AWS_PROFILE",
+		"AWS_DEFAULT_PROFILE",
+		"AWS_REGION",
+		"AWS_DEFAULT_REGION",
+		"AWS_CONTAINER_CREDENTIALS_RELATIVE_URI",
+		"AWS_CONTAINER_CREDENTIALS_FULL_URI",
+	}
+	for _, key := range singles {
+		if env.Get(key) != "" {
+			return true
+		}
+	}
+	return false
+}
+
+// globalConfig returns the path of the main global config file.
+// Resolution order: $XDG_CONFIG_HOME, then the platform default
+// (%LOCALAPPDATA% on Windows, $HOME/.config elsewhere).
+func globalConfig() string {
+	cfgFilename := fmt.Sprintf("%s.json", appName)
+
+	if xdg := os.Getenv("XDG_CONFIG_HOME"); xdg != "" {
+		return filepath.Join(xdg, appName, cfgFilename)
+	}
+
+	if runtime.GOOS == "windows" {
+		base := os.Getenv("LOCALAPPDATA")
+		if base == "" {
+			base = filepath.Join(os.Getenv("USERPROFILE"), "AppData", "Local")
+		}
+		return filepath.Join(base, appName, cfgFilename)
+	}
+
+	return filepath.Join(os.Getenv("HOME"), ".config", appName, cfgFilename)
+}
+
+// globalConfigData returns the path of the data-directory config file, used
+// when the app overrides configurations instead of updating the global
+// config. Resolution order: $XDG_DATA_HOME, then the platform default
+// (%LOCALAPPDATA% on Windows, $HOME/.local/share elsewhere).
+func globalConfigData() string {
+	cfgFilename := fmt.Sprintf("%s.json", appName)
+
+	if xdg := os.Getenv("XDG_DATA_HOME"); xdg != "" {
+		return filepath.Join(xdg, appName, cfgFilename)
+	}
+
+	if runtime.GOOS == "windows" {
+		base := os.Getenv("LOCALAPPDATA")
+		if base == "" {
+			base = filepath.Join(os.Getenv("USERPROFILE"), "AppData", "Local")
+		}
+		return filepath.Join(base, appName, cfgFilename)
+	}
+
+	return filepath.Join(os.Getenv("HOME"), ".local", "share", appName, cfgFilename)
+}
@@ -0,0 +1,1150 @@
+package config
+
+import (
+ "io"
+ "log/slog"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "github.com/charmbracelet/crush/internal/env"
+ "github.com/charmbracelet/crush/internal/fur/provider"
+ "github.com/stretchr/testify/assert"
+)
+
+// TestMain silences slog output for the whole package test run.
+func TestMain(m *testing.M) {
+	slog.SetDefault(slog.New(slog.NewTextHandler(io.Discard, nil)))
+
+	exitVal := m.Run()
+	os.Exit(exitVal)
+}
+
+// TestConfig_LoadFromReaders verifies that later readers win during the merge
+// and that an empty trailing object does not erase earlier values.
+func TestConfig_LoadFromReaders(t *testing.T) {
+	data1 := strings.NewReader(`{"providers": {"openai": {"api_key": "key1", "base_url": "https://api.openai.com/v1"}}}`)
+	data2 := strings.NewReader(`{"providers": {"openai": {"api_key": "key2", "base_url": "https://api.openai.com/v2"}}}`)
+	data3 := strings.NewReader(`{"providers": {"openai": {}}}`)
+
+	loadedConfig, err := loadFromReaders([]io.Reader{data1, data2, data3})
+
+	assert.NoError(t, err)
+	assert.NotNil(t, loadedConfig)
+	assert.Len(t, loadedConfig.Providers, 1)
+	assert.Equal(t, "key2", loadedConfig.Providers["openai"].APIKey)
+	assert.Equal(t, "https://api.openai.com/v2", loadedConfig.Providers["openai"].BaseURL)
+}
+
+// TestConfig_setDefaults checks that all optional sections are initialized
+// and the default context paths are merged in.
+func TestConfig_setDefaults(t *testing.T) {
+	cfg := &Config{}
+
+	cfg.setDefaults("/tmp")
+
+	assert.NotNil(t, cfg.Options)
+	assert.NotNil(t, cfg.Options.TUI)
+	assert.NotNil(t, cfg.Options.ContextPaths)
+	assert.NotNil(t, cfg.Providers)
+	assert.NotNil(t, cfg.Models)
+	assert.NotNil(t, cfg.LSP)
+	assert.NotNil(t, cfg.MCP)
+	assert.Equal(t, filepath.Join("/tmp", ".crush"), cfg.Options.DataDirectory)
+	for _, path := range defaultContextPaths {
+		assert.Contains(t, cfg.Options.ContextPaths, path)
+	}
+	assert.Equal(t, "/tmp", cfg.workingDir)
+}
+
+// TestConfig_configureProviders: a known provider with a resolvable env API
+// key is kept, and its key stays as the unresolved placeholder.
+func TestConfig_configureProviders(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          "openai",
+			APIKey:      "$OPENAI_API_KEY",
+			APIEndpoint: "https://api.openai.com/v1",
+			Models: []provider.Model{{
+				ID: "test-model",
+			}},
+		},
+	}
+
+	cfg := &Config{}
+	cfg.setDefaults("/tmp")
+	env := env.NewFromMap(map[string]string{
+		"OPENAI_API_KEY": "test-key",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+	assert.Len(t, cfg.Providers, 1)
+
+	// We want to make sure that we keep the configured API key as a placeholder
+	assert.Equal(t, "$OPENAI_API_KEY", cfg.Providers["openai"].APIKey)
+}
+
+// TestConfig_configureProvidersWithOverride: user config overrides API key
+// and base URL and its model list is merged (user models first, de-duplicated).
+func TestConfig_configureProvidersWithOverride(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          "openai",
+			APIKey:      "$OPENAI_API_KEY",
+			APIEndpoint: "https://api.openai.com/v1",
+			Models: []provider.Model{{
+				ID: "test-model",
+			}},
+		},
+	}
+
+	cfg := &Config{
+		Providers: map[string]ProviderConfig{
+			"openai": {
+				APIKey:  "xyz",
+				BaseURL: "https://api.openai.com/v2",
+				Models: []provider.Model{
+					{
+						ID:    "test-model",
+						Model: "Updated",
+					},
+					{
+						ID: "another-model",
+					},
+				},
+			},
+		},
+	}
+	cfg.setDefaults("/tmp")
+
+	env := env.NewFromMap(map[string]string{
+		"OPENAI_API_KEY": "test-key",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+	assert.Len(t, cfg.Providers, 1)
+
+	// We want to make sure that we keep the configured API key as a placeholder
+	assert.Equal(t, "xyz", cfg.Providers["openai"].APIKey)
+	assert.Equal(t, "https://api.openai.com/v2", cfg.Providers["openai"].BaseURL)
+	assert.Len(t, cfg.Providers["openai"].Models, 2)
+	assert.Equal(t, "Updated", cfg.Providers["openai"].Models[0].Model)
+}
+
+// TestConfig_configureProvidersWithNewProvider: a valid custom provider is
+// kept alongside the known provider, and gets its ID populated.
+func TestConfig_configureProvidersWithNewProvider(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          "openai",
+			APIKey:      "$OPENAI_API_KEY",
+			APIEndpoint: "https://api.openai.com/v1",
+			Models: []provider.Model{{
+				ID: "test-model",
+			}},
+		},
+	}
+
+	cfg := &Config{
+		Providers: map[string]ProviderConfig{
+			"custom": {
+				APIKey:  "xyz",
+				BaseURL: "https://api.someendpoint.com/v2",
+				Models: []provider.Model{
+					{
+						ID: "test-model",
+					},
+				},
+			},
+		},
+	}
+	cfg.setDefaults("/tmp")
+	env := env.NewFromMap(map[string]string{
+		"OPENAI_API_KEY": "test-key",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+	// Should be two because of the env variable
+	assert.Len(t, cfg.Providers, 2)
+
+	// We want to make sure that we keep the configured API key as a placeholder
+	assert.Equal(t, "xyz", cfg.Providers["custom"].APIKey)
+	// Make sure we set the ID correctly
+	assert.Equal(t, "custom", cfg.Providers["custom"].ID)
+	assert.Equal(t, "https://api.someendpoint.com/v2", cfg.Providers["custom"].BaseURL)
+	assert.Len(t, cfg.Providers["custom"].Models, 1)
+
+	_, ok := cfg.Providers["openai"]
+	assert.True(t, ok, "OpenAI provider should still be present")
+}
+
+// Bedrock is kept when static AWS credentials are present in the env.
+func TestConfig_configureProvidersBedrockWithCredentials(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          provider.InferenceProviderBedrock,
+			APIKey:      "",
+			APIEndpoint: "",
+			Models: []provider.Model{{
+				ID: "anthropic.claude-sonnet-4-20250514-v1:0",
+			}},
+		},
+	}
+
+	cfg := &Config{}
+	cfg.setDefaults("/tmp")
+	env := env.NewFromMap(map[string]string{
+		"AWS_ACCESS_KEY_ID":     "test-key-id",
+		"AWS_SECRET_ACCESS_KEY": "test-secret-key",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+	assert.Len(t, cfg.Providers, 1)
+
+	bedrockProvider, ok := cfg.Providers["bedrock"]
+	assert.True(t, ok, "Bedrock provider should be present")
+	assert.Len(t, bedrockProvider.Models, 1)
+	assert.Equal(t, "anthropic.claude-sonnet-4-20250514-v1:0", bedrockProvider.Models[0].ID)
+}
+
+// Bedrock is silently skipped (no error) when no AWS credentials are found.
+func TestConfig_configureProvidersBedrockWithoutCredentials(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          provider.InferenceProviderBedrock,
+			APIKey:      "",
+			APIEndpoint: "",
+			Models: []provider.Model{{
+				ID: "anthropic.claude-sonnet-4-20250514-v1:0",
+			}},
+		},
+	}
+
+	cfg := &Config{}
+	cfg.setDefaults("/tmp")
+	env := env.NewFromMap(map[string]string{})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+	// Provider should not be configured without credentials
+	assert.Len(t, cfg.Providers, 0)
+}
+
+// Bedrock with a non-anthropic model ID is a hard error, not a skip.
+func TestConfig_configureProvidersBedrockWithoutUnsupportedModel(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          provider.InferenceProviderBedrock,
+			APIKey:      "",
+			APIEndpoint: "",
+			Models: []provider.Model{{
+				ID: "some-random-model",
+			}},
+		},
+	}
+
+	cfg := &Config{}
+	cfg.setDefaults("/tmp")
+	env := env.NewFromMap(map[string]string{
+		"AWS_ACCESS_KEY_ID":     "test-key-id",
+		"AWS_SECRET_ACCESS_KEY": "test-secret-key",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.Error(t, err)
+}
+
+// Vertex AI is kept when the opt-in flag plus project and location are all
+// present, and the project/location land in ExtraParams.
+func TestConfig_configureProvidersVertexAIWithCredentials(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          provider.InferenceProviderVertexAI,
+			APIKey:      "",
+			APIEndpoint: "",
+			Models: []provider.Model{{
+				ID: "gemini-pro",
+			}},
+		},
+	}
+
+	cfg := &Config{}
+	cfg.setDefaults("/tmp")
+	env := env.NewFromMap(map[string]string{
+		"GOOGLE_GENAI_USE_VERTEXAI": "true",
+		"GOOGLE_CLOUD_PROJECT":      "test-project",
+		"GOOGLE_CLOUD_LOCATION":     "us-central1",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+	assert.Len(t, cfg.Providers, 1)
+
+	vertexProvider, ok := cfg.Providers["vertexai"]
+	assert.True(t, ok, "VertexAI provider should be present")
+	assert.Len(t, vertexProvider.Models, 1)
+	assert.Equal(t, "gemini-pro", vertexProvider.Models[0].ID)
+	assert.Equal(t, "test-project", vertexProvider.ExtraParams["project"])
+	assert.Equal(t, "us-central1", vertexProvider.ExtraParams["location"])
+}
+
+// Vertex AI is skipped when the opt-in flag is explicitly "false".
+func TestConfig_configureProvidersVertexAIWithoutCredentials(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          provider.InferenceProviderVertexAI,
+			APIKey:      "",
+			APIEndpoint: "",
+			Models: []provider.Model{{
+				ID: "gemini-pro",
+			}},
+		},
+	}
+
+	cfg := &Config{}
+	cfg.setDefaults("/tmp")
+	env := env.NewFromMap(map[string]string{
+		"GOOGLE_GENAI_USE_VERTEXAI": "false",
+		"GOOGLE_CLOUD_PROJECT":      "test-project",
+		"GOOGLE_CLOUD_LOCATION":     "us-central1",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+	// Provider should not be configured without proper credentials
+	assert.Len(t, cfg.Providers, 0)
+}
+
+// Vertex AI is skipped when GOOGLE_CLOUD_PROJECT is missing.
+func TestConfig_configureProvidersVertexAIMissingProject(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          provider.InferenceProviderVertexAI,
+			APIKey:      "",
+			APIEndpoint: "",
+			Models: []provider.Model{{
+				ID: "gemini-pro",
+			}},
+		},
+	}
+
+	cfg := &Config{}
+	cfg.setDefaults("/tmp")
+	env := env.NewFromMap(map[string]string{
+		"GOOGLE_GENAI_USE_VERTEXAI": "true",
+		"GOOGLE_CLOUD_LOCATION":     "us-central1",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+	// Provider should not be configured without project
+	assert.Len(t, cfg.Providers, 0)
+}
+
+// configureProviders stamps the provider's ID into the prepared config.
+func TestConfig_configureProvidersSetProviderID(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          "openai",
+			APIKey:      "$OPENAI_API_KEY",
+			APIEndpoint: "https://api.openai.com/v1",
+			Models: []provider.Model{{
+				ID: "test-model",
+			}},
+		},
+	}
+
+	cfg := &Config{}
+	cfg.setDefaults("/tmp")
+	env := env.NewFromMap(map[string]string{
+		"OPENAI_API_KEY": "test-key",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+	assert.Len(t, cfg.Providers, 1)
+
+	// Provider ID should be set
+	assert.Equal(t, "openai", cfg.Providers["openai"].ID)
+}
+
+// EnabledProviders filters out providers whose Disable flag is set.
+func TestConfig_EnabledProviders(t *testing.T) {
+	t.Run("all providers enabled", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"openai": {
+					ID:      "openai",
+					APIKey:  "key1",
+					Disable: false,
+				},
+				"anthropic": {
+					ID:      "anthropic",
+					APIKey:  "key2",
+					Disable: false,
+				},
+			},
+		}
+
+		enabled := cfg.EnabledProviders()
+		assert.Len(t, enabled, 2)
+	})
+
+	t.Run("some providers disabled", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"openai": {
+					ID:      "openai",
+					APIKey:  "key1",
+					Disable: false,
+				},
+				"anthropic": {
+					ID:      "anthropic",
+					APIKey:  "key2",
+					Disable: true,
+				},
+			},
+		}
+
+		enabled := cfg.EnabledProviders()
+		assert.Len(t, enabled, 1)
+		assert.Equal(t, "openai", enabled[0].ID)
+	})
+
+	t.Run("empty providers map", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{},
+		}
+
+		enabled := cfg.EnabledProviders()
+		assert.Len(t, enabled, 0)
+	})
+}
+
+// IsConfigured is true iff at least one enabled provider exists.
+func TestConfig_IsConfigured(t *testing.T) {
+	t.Run("returns true when at least one provider is enabled", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"openai": {
+					ID:      "openai",
+					APIKey:  "key1",
+					Disable: false,
+				},
+			},
+		}
+
+		assert.True(t, cfg.IsConfigured())
+	})
+
+	t.Run("returns false when no providers are configured", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{},
+		}
+
+		assert.False(t, cfg.IsConfigured())
+	})
+
+	t.Run("returns false when all providers are disabled", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"openai": {
+					ID:      "openai",
+					APIKey:  "key1",
+					Disable: true,
+				},
+				"anthropic": {
+					ID:      "anthropic",
+					APIKey:  "key2",
+					Disable: true,
+				},
+			},
+		}
+
+		assert.False(t, cfg.IsConfigured())
+	})
+}
+
+// A known provider disabled in user config is removed entirely.
+func TestConfig_configureProvidersWithDisabledProvider(t *testing.T) {
+	knownProviders := []provider.Provider{
+		{
+			ID:          "openai",
+			APIKey:      "$OPENAI_API_KEY",
+			APIEndpoint: "https://api.openai.com/v1",
+			Models: []provider.Model{{
+				ID: "test-model",
+			}},
+		},
+	}
+
+	cfg := &Config{
+		Providers: map[string]ProviderConfig{
+			"openai": {
+				Disable: true,
+			},
+		},
+	}
+	cfg.setDefaults("/tmp")
+
+	env := env.NewFromMap(map[string]string{
+		"OPENAI_API_KEY": "test-key",
+	})
+	resolver := NewEnvironmentVariableResolver(env)
+	err := cfg.configureProviders(env, resolver, knownProviders)
+	assert.NoError(t, err)
+
+	// Provider should be removed from config when disabled
+	assert.Len(t, cfg.Providers, 0)
+	_, exists := cfg.Providers["openai"]
+	assert.False(t, exists)
+}
+
+// Custom (unknown) providers must carry an API key, a base URL, at least one
+// model, and an OpenAI-compatible type; anything else is removed.
+func TestConfig_configureProvidersCustomProviderValidation(t *testing.T) {
+	t.Run("custom provider with missing API key is removed", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"custom": {
+					BaseURL: "https://api.custom.com/v1",
+					Models: []provider.Model{{
+						ID: "test-model",
+					}},
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, []provider.Provider{})
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 0)
+		_, exists := cfg.Providers["custom"]
+		assert.False(t, exists)
+	})
+
+	t.Run("custom provider with missing BaseURL is removed", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"custom": {
+					APIKey: "test-key",
+					Models: []provider.Model{{
+						ID: "test-model",
+					}},
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, []provider.Provider{})
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 0)
+		_, exists := cfg.Providers["custom"]
+		assert.False(t, exists)
+	})
+
+	t.Run("custom provider with no models is removed", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"custom": {
+					APIKey:  "test-key",
+					BaseURL: "https://api.custom.com/v1",
+					Models:  []provider.Model{},
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, []provider.Provider{})
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 0)
+		_, exists := cfg.Providers["custom"]
+		assert.False(t, exists)
+	})
+
+	t.Run("custom provider with unsupported type is removed", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"custom": {
+					APIKey:  "test-key",
+					BaseURL: "https://api.custom.com/v1",
+					Type:    "unsupported",
+					Models: []provider.Model{{
+						ID: "test-model",
+					}},
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, []provider.Provider{})
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 0)
+		_, exists := cfg.Providers["custom"]
+		assert.False(t, exists)
+	})
+
+	t.Run("valid custom provider is kept and ID is set", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"custom": {
+					APIKey:  "test-key",
+					BaseURL: "https://api.custom.com/v1",
+					Type:    provider.TypeOpenAI,
+					Models: []provider.Model{{
+						ID: "test-model",
+					}},
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, []provider.Provider{})
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 1)
+		customProvider, exists := cfg.Providers["custom"]
+		assert.True(t, exists)
+		assert.Equal(t, "custom", customProvider.ID)
+		assert.Equal(t, "test-key", customProvider.APIKey)
+		assert.Equal(t, "https://api.custom.com/v1", customProvider.BaseURL)
+	})
+
+	t.Run("disabled custom provider is removed", func(t *testing.T) {
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"custom": {
+					APIKey:  "test-key",
+					BaseURL: "https://api.custom.com/v1",
+					Type:    provider.TypeOpenAI,
+					Disable: true,
+					Models: []provider.Model{{
+						ID: "test-model",
+					}},
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, []provider.Provider{})
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 0)
+		_, exists := cfg.Providers["custom"]
+		assert.False(t, exists)
+	})
+}
+
+// TestConfig_configureProvidersEnhancedCredentialValidation checks that known
+// providers are dropped from the resolved configuration when their
+// credentials cannot be resolved, even if a partial user config entry exists
+// for them.
+func TestConfig_configureProvidersEnhancedCredentialValidation(t *testing.T) {
+	t.Run("VertexAI provider removed when credentials missing with existing config", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:          provider.InferenceProviderVertexAI,
+				APIKey:      "",
+				APIEndpoint: "",
+				Models: []provider.Model{{
+					ID: "gemini-pro",
+				}},
+			},
+		}
+
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"vertexai": {
+					BaseURL: "custom-url",
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		// GOOGLE_GENAI_USE_VERTEXAI is not "true", so VertexAI is treated as
+		// unconfigured here despite the user-supplied BaseURL.
+		env := env.NewFromMap(map[string]string{
+			"GOOGLE_GENAI_USE_VERTEXAI": "false",
+		})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 0)
+		_, exists := cfg.Providers["vertexai"]
+		assert.False(t, exists)
+	})
+
+	t.Run("Bedrock provider removed when AWS credentials missing with existing config", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:          provider.InferenceProviderBedrock,
+				APIKey:      "",
+				APIEndpoint: "",
+				Models: []provider.Model{{
+					ID: "anthropic.claude-sonnet-4-20250514-v1:0",
+				}},
+			},
+		}
+
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"bedrock": {
+					BaseURL: "custom-url",
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		// Empty environment: no AWS credentials of any kind are available.
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 0)
+		_, exists := cfg.Providers["bedrock"]
+		assert.False(t, exists)
+	})
+
+	t.Run("provider removed when API key missing with existing config", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:          "openai",
+				APIKey:      "$MISSING_API_KEY",
+				APIEndpoint: "https://api.openai.com/v1",
+				Models: []provider.Model{{
+					ID: "test-model",
+				}},
+			},
+		}
+
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"openai": {
+					BaseURL: "custom-url",
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		// $MISSING_API_KEY is not present in the environment, so the key
+		// cannot be resolved and the provider must be dropped.
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 0)
+		_, exists := cfg.Providers["openai"]
+		assert.False(t, exists)
+	})
+
+	t.Run("known provider should still be added if the endpoint is missing the client will use default endpoints", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:          "openai",
+				APIKey:      "$OPENAI_API_KEY",
+				APIEndpoint: "$MISSING_ENDPOINT",
+				Models: []provider.Model{{
+					ID: "test-model",
+				}},
+			},
+		}
+
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"openai": {
+					APIKey: "test-key",
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+
+		// Only the API key resolves; the unresolved endpoint must not
+		// disqualify the provider.
+		env := env.NewFromMap(map[string]string{
+			"OPENAI_API_KEY": "test-key",
+		})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+
+		assert.Len(t, cfg.Providers, 1)
+		_, exists := cfg.Providers["openai"]
+		assert.True(t, exists)
+	})
+}
+
+// TestConfig_defaultModelSelection covers how the default large/small model
+// pair is chosen from the configured providers.
+func TestConfig_defaultModelSelection(t *testing.T) {
+	t.Run("default behavior uses the default models for given provider", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:                  "openai",
+				APIKey:              "abc",
+				DefaultLargeModelID: "large-model",
+				DefaultSmallModelID: "small-model",
+				Models: []provider.Model{
+					{
+						ID:               "large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "small-model",
+						DefaultMaxTokens: 500,
+					},
+				},
+			},
+		}
+
+		cfg := &Config{}
+		cfg.setDefaults("/tmp")
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+
+		large, small, err := cfg.defaultModelSelection(knownProviders)
+		assert.NoError(t, err)
+		assert.Equal(t, "large-model", large.Model)
+		assert.Equal(t, "openai", large.Provider)
+		assert.Equal(t, int64(1000), large.MaxTokens)
+		assert.Equal(t, "small-model", small.Model)
+		assert.Equal(t, "openai", small.Provider)
+		assert.Equal(t, int64(500), small.MaxTokens)
+	})
+	// The provider's API key cannot be resolved, so no provider survives
+	// configuration and selection has nothing to pick from.
+	t.Run("should error if no providers configured", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:                  "openai",
+				APIKey:              "$MISSING_KEY",
+				DefaultLargeModelID: "large-model",
+				DefaultSmallModelID: "small-model",
+				Models: []provider.Model{
+					{
+						ID:               "large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "small-model",
+						DefaultMaxTokens: 500,
+					},
+				},
+			},
+		}
+
+		cfg := &Config{}
+		cfg.setDefaults("/tmp")
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+
+		_, _, err = cfg.defaultModelSelection(knownProviders)
+		assert.Error(t, err)
+	})
+	// The provider's declared default large model is absent from its model
+	// list, which must surface as an error.
+	t.Run("should error if model is missing", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:                  "openai",
+				APIKey:              "abc",
+				DefaultLargeModelID: "large-model",
+				DefaultSmallModelID: "small-model",
+				Models: []provider.Model{
+					{
+						ID:               "not-large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "small-model",
+						DefaultMaxTokens: 500,
+					},
+				},
+			},
+		}
+
+		cfg := &Config{}
+		cfg.setDefaults("/tmp")
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+		_, _, err = cfg.defaultModelSelection(knownProviders)
+		assert.Error(t, err)
+	})
+
+	t.Run("should configure the default models with a custom provider", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:                  "openai",
+				APIKey:              "$MISSING", // will not be included in the config
+				DefaultLargeModelID: "large-model",
+				DefaultSmallModelID: "small-model",
+				Models: []provider.Model{
+					{
+						ID:               "not-large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "small-model",
+						DefaultMaxTokens: 500,
+					},
+				},
+			},
+		}
+
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"custom": {
+					APIKey:  "test-key",
+					BaseURL: "https://api.custom.com/v1",
+					Models: []provider.Model{
+						{
+							ID:               "model",
+							DefaultMaxTokens: 600,
+						},
+					},
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+		large, small, err := cfg.defaultModelSelection(knownProviders)
+		assert.NoError(t, err)
+		// With openai dropped, the custom provider's only model serves as
+		// both the large and the small default.
+		assert.Equal(t, "model", large.Model)
+		assert.Equal(t, "custom", large.Provider)
+		assert.Equal(t, int64(600), large.MaxTokens)
+		assert.Equal(t, "model", small.Model)
+		assert.Equal(t, "custom", small.Provider)
+		assert.Equal(t, int64(600), small.MaxTokens)
+	})
+
+	t.Run("should fail if no model configured", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:                  "openai",
+				APIKey:              "$MISSING", // will not be included in the config
+				DefaultLargeModelID: "large-model",
+				DefaultSmallModelID: "small-model",
+				Models: []provider.Model{
+					{
+						ID:               "not-large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "small-model",
+						DefaultMaxTokens: 500,
+					},
+				},
+			},
+		}
+
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"custom": {
+					APIKey:  "test-key",
+					BaseURL: "https://api.custom.com/v1",
+					Models:  []provider.Model{},
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+		_, _, err = cfg.defaultModelSelection(knownProviders)
+		assert.Error(t, err)
+	})
+	// When both a known and a custom provider are usable, the known
+	// (default) provider takes precedence for model selection.
+	t.Run("should use the default provider first", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:                  "openai",
+				APIKey:              "set",
+				DefaultLargeModelID: "large-model",
+				DefaultSmallModelID: "small-model",
+				Models: []provider.Model{
+					{
+						ID:               "large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "small-model",
+						DefaultMaxTokens: 500,
+					},
+				},
+			},
+		}
+
+		cfg := &Config{
+			Providers: map[string]ProviderConfig{
+				"custom": {
+					APIKey:  "test-key",
+					BaseURL: "https://api.custom.com/v1",
+					Models: []provider.Model{
+						{
+							ID:               "large-model",
+							DefaultMaxTokens: 1000,
+						},
+					},
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+		large, small, err := cfg.defaultModelSelection(knownProviders)
+		assert.NoError(t, err)
+		assert.Equal(t, "large-model", large.Model)
+		assert.Equal(t, "openai", large.Provider)
+		assert.Equal(t, int64(1000), large.MaxTokens)
+		assert.Equal(t, "small-model", small.Model)
+		assert.Equal(t, "openai", small.Provider)
+		assert.Equal(t, int64(500), small.MaxTokens)
+	})
+}
+
+// TestConfig_configureSelectedModels covers merging user-selected models on
+// top of the provider defaults.
+func TestConfig_configureSelectedModels(t *testing.T) {
+	t.Run("should override defaults", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:                  "openai",
+				APIKey:              "abc",
+				DefaultLargeModelID: "large-model",
+				DefaultSmallModelID: "small-model",
+				Models: []provider.Model{
+					{
+						ID:               "larger-model",
+						DefaultMaxTokens: 2000,
+					},
+					{
+						ID:               "large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "small-model",
+						DefaultMaxTokens: 500,
+					},
+				},
+			},
+		}
+
+		cfg := &Config{
+			Models: map[SelectedModelType]SelectedModel{
+				"large": {
+					Model: "larger-model",
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+
+		err = cfg.configureSelectedModels(knownProviders)
+		assert.NoError(t, err)
+		large := cfg.Models[SelectedModelTypeLarge]
+		small := cfg.Models[SelectedModelTypeSmall]
+		// The explicit large selection wins; small falls back to defaults.
+		assert.Equal(t, "larger-model", large.Model)
+		assert.Equal(t, "openai", large.Provider)
+		assert.Equal(t, int64(2000), large.MaxTokens)
+		assert.Equal(t, "small-model", small.Model)
+		assert.Equal(t, "openai", small.Provider)
+		assert.Equal(t, int64(500), small.MaxTokens)
+	})
+	t.Run("should be possible to use multiple providers", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:                  "openai",
+				APIKey:              "abc",
+				DefaultLargeModelID: "large-model",
+				DefaultSmallModelID: "small-model",
+				Models: []provider.Model{
+					{
+						ID:               "large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "small-model",
+						DefaultMaxTokens: 500,
+					},
+				},
+			},
+			{
+				ID:                  "anthropic",
+				APIKey:              "abc",
+				DefaultLargeModelID: "a-large-model",
+				DefaultSmallModelID: "a-small-model",
+				Models: []provider.Model{
+					{
+						ID:               "a-large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "a-small-model",
+						DefaultMaxTokens: 200,
+					},
+				},
+			},
+		}
+
+		cfg := &Config{
+			Models: map[SelectedModelType]SelectedModel{
+				"small": {
+					Model:     "a-small-model",
+					Provider:  "anthropic",
+					MaxTokens: 300,
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+
+		err = cfg.configureSelectedModels(knownProviders)
+		assert.NoError(t, err)
+		large := cfg.Models[SelectedModelTypeLarge]
+		small := cfg.Models[SelectedModelTypeSmall]
+		// large defaults to openai; small honors the anthropic selection,
+		// including its explicit MaxTokens override.
+		assert.Equal(t, "large-model", large.Model)
+		assert.Equal(t, "openai", large.Provider)
+		assert.Equal(t, int64(1000), large.MaxTokens)
+		assert.Equal(t, "a-small-model", small.Model)
+		assert.Equal(t, "anthropic", small.Provider)
+		assert.Equal(t, int64(300), small.MaxTokens)
+	})
+
+	t.Run("should override the max tokens only", func(t *testing.T) {
+		knownProviders := []provider.Provider{
+			{
+				ID:                  "openai",
+				APIKey:              "abc",
+				DefaultLargeModelID: "large-model",
+				DefaultSmallModelID: "small-model",
+				Models: []provider.Model{
+					{
+						ID:               "large-model",
+						DefaultMaxTokens: 1000,
+					},
+					{
+						ID:               "small-model",
+						DefaultMaxTokens: 500,
+					},
+				},
+			},
+		}
+
+		cfg := &Config{
+			Models: map[SelectedModelType]SelectedModel{
+				"large": {
+					MaxTokens: 100,
+				},
+			},
+		}
+		cfg.setDefaults("/tmp")
+		env := env.NewFromMap(map[string]string{})
+		resolver := NewEnvironmentVariableResolver(env)
+		err := cfg.configureProviders(env, resolver, knownProviders)
+		assert.NoError(t, err)
+
+		err = cfg.configureSelectedModels(knownProviders)
+		assert.NoError(t, err)
+		large := cfg.Models[SelectedModelTypeLarge]
+		// Model/provider stay at the defaults; only MaxTokens is overridden.
+		assert.Equal(t, "large-model", large.Model)
+		assert.Equal(t, "openai", large.Provider)
+		assert.Equal(t, int64(100), large.MaxTokens)
+	})
+}
@@ -0,0 +1,16 @@
+package config
+
+import (
+	"bytes"
+	"fmt"
+	"io"
+
+	"github.com/qjebbs/go-jsons"
+)
+
+// Merge combines the JSON documents read from data into a single document
+// (via go-jsons) and returns the merged result as a fresh reader.
+// Any merge failure is returned with added context.
+func Merge(data []io.Reader) (io.Reader, error) {
+	got, err := jsons.Merge(data)
+	if err != nil {
+		return nil, fmt.Errorf("merging JSON documents: %w", err)
+	}
+	return bytes.NewReader(got), nil
+}
@@ -0,0 +1,27 @@
+package config
+
+import (
+ "io"
+ "strings"
+ "testing"
+)
+
+// TestMerge verifies that two flat JSON objects merge into one object
+// containing the union of their keys.
+func TestMerge(t *testing.T) {
+	inputs := []io.Reader{
+		strings.NewReader(`{"foo": "bar"}`),
+		strings.NewReader(`{"baz": "qux"}`),
+	}
+
+	merged, err := Merge(inputs)
+	if err != nil {
+		t.Fatalf("expected no error, got %v", err)
+	}
+
+	raw, err := io.ReadAll(merged)
+	if err != nil {
+		t.Fatalf("expected no error reading merged data, got %v", err)
+	}
+
+	if want := `{"baz":"qux","foo":"bar"}`; string(raw) != want {
+		t.Errorf("expected %s, got %s", want, string(raw))
+	}
+}
@@ -4,27 +4,44 @@ import (
"encoding/json"
"os"
"path/filepath"
+ "runtime"
"sync"
"github.com/charmbracelet/crush/internal/fur/client"
"github.com/charmbracelet/crush/internal/fur/provider"
)
-var fur = client.New()
+// ProviderClient is the minimal upstream API surface needed to fetch the
+// provider catalog; it exists so tests can substitute a mock client.
+type ProviderClient interface {
+	GetProviders() ([]provider.Provider, error)
+}
 var (
-	providerOnc sync.Once // Ensures the initialization happens only once
+	// providerOnce guards the one-time load of providerList.
+	providerOnce sync.Once
+	// providerList memoizes the result of the first LoadProviders call.
 	providerList []provider.Provider
-	// UseMockProviders can be set to true in tests to avoid API calls
-	UseMockProviders bool
 )
-func providersPath() string {
- return filepath.Join(baseDataPath(), "providers.json")
+// providerCacheFileData returns the path of the on-disk provider cache file
+// ("providers.json"). XDG_DATA_HOME takes priority when set; otherwise the
+// platform's default data directory is used.
+func providerCacheFileData() string {
+	xdgDataHome := os.Getenv("XDG_DATA_HOME")
+	if xdgDataHome != "" {
+		return filepath.Join(xdgDataHome, appName, "providers.json")
+	}
+
+	// return the path to the main data directory
+	// for windows, it should be in `%LOCALAPPDATA%/crush/`
+	// for linux and macOS, it should be in `$HOME/.local/share/crush/`
+	if runtime.GOOS == "windows" {
+		localAppData := os.Getenv("LOCALAPPDATA")
+		if localAppData == "" {
+			localAppData = filepath.Join(os.Getenv("USERPROFILE"), "AppData", "Local")
+		}
+		return filepath.Join(localAppData, appName, "providers.json")
+	}
+
+	return filepath.Join(os.Getenv("HOME"), ".local", "share", appName, "providers.json")
 }
-func saveProviders(providers []provider.Provider) error {
- path := providersPath()
+func saveProvidersInCache(path string, providers []provider.Provider) error {
dir := filepath.Dir(path)
if err := os.MkdirAll(dir, 0o755); err != nil {
return err
@@ -38,8 +55,7 @@ func saveProviders(providers []provider.Provider) error {
return os.WriteFile(path, data, 0o644)
}
-func loadProviders() ([]provider.Provider, error) {
- path := providersPath()
+func loadProvidersFromCache(path string) ([]provider.Provider, error) {
data, err := os.ReadFile(path)
if err != nil {
return nil, err
@@ -50,34 +66,33 @@ func loadProviders() ([]provider.Provider, error) {
return providers, err
}
-func Providers() []provider.Provider {
- providerOnc.Do(func() {
- // Use mock providers when testing
- if UseMockProviders {
- providerList = MockProviders()
- return
+// loadProviders fetches the provider list from the upstream client and
+// mirrors it into the on-disk cache at path. If the upstream call fails, it
+// falls back to the cached copy; an error is returned only when both the
+// upstream fetch and the cache read fail (or the cache write fails).
+func loadProviders(path string, client ProviderClient) ([]provider.Provider, error) {
+	providers, err := client.GetProviders()
+	if err != nil {
+		fallbackToCache, err := loadProvidersFromCache(path)
+		if err != nil {
+			return nil, err
 		}
-
-		// Try to get providers from upstream API
-		if providers, err := fur.GetProviders(); err == nil {
-			providerList = providers
-			// Save providers locally for future fallback
-			_ = saveProviders(providers)
-		} else {
-			// If upstream fails, try to load from local cache
-			if localProviders, localErr := loadProviders(); localErr == nil {
-				providerList = localProviders
-			} else {
-				// If both fail, return empty list
-				providerList = []provider.Provider{}
-			}
+		providers = fallbackToCache
+	} else {
+		// Cache the freshly fetched list. (Previously this saved the
+		// package-level providerList, which is still nil at this point,
+		// writing "null" into the cache file.)
+		if err := saveProvidersInCache(path, providers); err != nil {
+			return nil, err
 		}
-	})
-	return providerList
+	}
+	return providers, nil
+}
+
+// Providers returns the process-wide provider list, fetching it through the
+// default upstream client (and the on-disk cache) on first use.
+func Providers() ([]provider.Provider, error) {
+	return LoadProviders(client.New())
 }
-// ResetProviders resets the provider cache. Useful for testing.
-func ResetProviders() {
- providerOnc = sync.Once{}
- providerList = nil
+// LoadProviders resolves the provider list exactly once per process using the
+// given client (with the on-disk cache as fallback) and memoizes the result
+// in providerList.
+//
+// NOTE(review): a failure on the first call still consumes providerOnce, so
+// subsequent calls return (nil, nil) instead of retrying — confirm this
+// latching behavior is intended.
+func LoadProviders(client ProviderClient) ([]provider.Provider, error) {
+	var err error
+	providerOnce.Do(func() {
+		providerList, err = loadProviders(providerCacheFileData(), client)
+	})
+	if err != nil {
+		return nil, err
+	}
+	return providerList, nil
 }
@@ -1,293 +0,0 @@
-package config
-
-import (
- "github.com/charmbracelet/crush/internal/fur/provider"
-)
-
-// MockProviders returns a mock list of providers for testing.
-// This avoids making API calls during tests and provides consistent test data.
-// Simplified version with only default models from each provider.
-func MockProviders() []provider.Provider {
- return []provider.Provider{
- {
- Name: "Anthropic",
- ID: provider.InferenceProviderAnthropic,
- APIKey: "$ANTHROPIC_API_KEY",
- APIEndpoint: "$ANTHROPIC_API_ENDPOINT",
- Type: provider.TypeAnthropic,
- DefaultLargeModelID: "claude-sonnet-4-20250514",
- DefaultSmallModelID: "claude-3-5-haiku-20241022",
- Models: []provider.Model{
- {
- ID: "claude-sonnet-4-20250514",
- Name: "Claude Sonnet 4",
- CostPer1MIn: 3.0,
- CostPer1MOut: 15.0,
- CostPer1MInCached: 3.75,
- CostPer1MOutCached: 0.3,
- ContextWindow: 200000,
- DefaultMaxTokens: 50000,
- CanReason: true,
- SupportsImages: true,
- },
- {
- ID: "claude-3-5-haiku-20241022",
- Name: "Claude 3.5 Haiku",
- CostPer1MIn: 0.8,
- CostPer1MOut: 4.0,
- CostPer1MInCached: 1.0,
- CostPer1MOutCached: 0.08,
- ContextWindow: 200000,
- DefaultMaxTokens: 5000,
- CanReason: false,
- SupportsImages: true,
- },
- },
- },
- {
- Name: "OpenAI",
- ID: provider.InferenceProviderOpenAI,
- APIKey: "$OPENAI_API_KEY",
- APIEndpoint: "$OPENAI_API_ENDPOINT",
- Type: provider.TypeOpenAI,
- DefaultLargeModelID: "codex-mini-latest",
- DefaultSmallModelID: "gpt-4o",
- Models: []provider.Model{
- {
- ID: "codex-mini-latest",
- Name: "Codex Mini",
- CostPer1MIn: 1.5,
- CostPer1MOut: 6.0,
- CostPer1MInCached: 0.0,
- CostPer1MOutCached: 0.375,
- ContextWindow: 200000,
- DefaultMaxTokens: 50000,
- CanReason: true,
- HasReasoningEffort: true,
- DefaultReasoningEffort: "medium",
- SupportsImages: true,
- },
- {
- ID: "gpt-4o",
- Name: "GPT-4o",
- CostPer1MIn: 2.5,
- CostPer1MOut: 10.0,
- CostPer1MInCached: 0.0,
- CostPer1MOutCached: 1.25,
- ContextWindow: 128000,
- DefaultMaxTokens: 20000,
- CanReason: false,
- SupportsImages: true,
- },
- },
- },
- {
- Name: "Google Gemini",
- ID: provider.InferenceProviderGemini,
- APIKey: "$GEMINI_API_KEY",
- APIEndpoint: "$GEMINI_API_ENDPOINT",
- Type: provider.TypeGemini,
- DefaultLargeModelID: "gemini-2.5-pro",
- DefaultSmallModelID: "gemini-2.5-flash",
- Models: []provider.Model{
- {
- ID: "gemini-2.5-pro",
- Name: "Gemini 2.5 Pro",
- CostPer1MIn: 1.25,
- CostPer1MOut: 10.0,
- CostPer1MInCached: 1.625,
- CostPer1MOutCached: 0.31,
- ContextWindow: 1048576,
- DefaultMaxTokens: 50000,
- CanReason: true,
- SupportsImages: true,
- },
- {
- ID: "gemini-2.5-flash",
- Name: "Gemini 2.5 Flash",
- CostPer1MIn: 0.3,
- CostPer1MOut: 2.5,
- CostPer1MInCached: 0.3833,
- CostPer1MOutCached: 0.075,
- ContextWindow: 1048576,
- DefaultMaxTokens: 50000,
- CanReason: true,
- SupportsImages: true,
- },
- },
- },
- {
- Name: "xAI",
- ID: provider.InferenceProviderXAI,
- APIKey: "$XAI_API_KEY",
- APIEndpoint: "https://api.x.ai/v1",
- Type: provider.TypeXAI,
- DefaultLargeModelID: "grok-3",
- DefaultSmallModelID: "grok-3-mini",
- Models: []provider.Model{
- {
- ID: "grok-3",
- Name: "Grok 3",
- CostPer1MIn: 3.0,
- CostPer1MOut: 15.0,
- CostPer1MInCached: 0.0,
- CostPer1MOutCached: 0.75,
- ContextWindow: 131072,
- DefaultMaxTokens: 20000,
- CanReason: false,
- SupportsImages: false,
- },
- {
- ID: "grok-3-mini",
- Name: "Grok 3 Mini",
- CostPer1MIn: 0.3,
- CostPer1MOut: 0.5,
- CostPer1MInCached: 0.0,
- CostPer1MOutCached: 0.075,
- ContextWindow: 131072,
- DefaultMaxTokens: 20000,
- CanReason: true,
- SupportsImages: false,
- },
- },
- },
- {
- Name: "Azure OpenAI",
- ID: provider.InferenceProviderAzure,
- APIKey: "$AZURE_OPENAI_API_KEY",
- APIEndpoint: "$AZURE_OPENAI_API_ENDPOINT",
- Type: provider.TypeAzure,
- DefaultLargeModelID: "o4-mini",
- DefaultSmallModelID: "gpt-4o",
- Models: []provider.Model{
- {
- ID: "o4-mini",
- Name: "o4 Mini",
- CostPer1MIn: 1.1,
- CostPer1MOut: 4.4,
- CostPer1MInCached: 0.0,
- CostPer1MOutCached: 0.275,
- ContextWindow: 200000,
- DefaultMaxTokens: 50000,
- CanReason: true,
- HasReasoningEffort: false,
- DefaultReasoningEffort: "medium",
- SupportsImages: true,
- },
- {
- ID: "gpt-4o",
- Name: "GPT-4o",
- CostPer1MIn: 2.5,
- CostPer1MOut: 10.0,
- CostPer1MInCached: 0.0,
- CostPer1MOutCached: 1.25,
- ContextWindow: 128000,
- DefaultMaxTokens: 20000,
- CanReason: false,
- SupportsImages: true,
- },
- },
- },
- {
- Name: "AWS Bedrock",
- ID: provider.InferenceProviderBedrock,
- Type: provider.TypeBedrock,
- DefaultLargeModelID: "anthropic.claude-sonnet-4-20250514-v1:0",
- DefaultSmallModelID: "anthropic.claude-3-5-haiku-20241022-v1:0",
- Models: []provider.Model{
- {
- ID: "anthropic.claude-sonnet-4-20250514-v1:0",
- Name: "AWS Claude Sonnet 4",
- CostPer1MIn: 3.0,
- CostPer1MOut: 15.0,
- CostPer1MInCached: 3.75,
- CostPer1MOutCached: 0.3,
- ContextWindow: 200000,
- DefaultMaxTokens: 50000,
- CanReason: true,
- SupportsImages: true,
- },
- {
- ID: "anthropic.claude-3-5-haiku-20241022-v1:0",
- Name: "AWS Claude 3.5 Haiku",
- CostPer1MIn: 0.8,
- CostPer1MOut: 4.0,
- CostPer1MInCached: 1.0,
- CostPer1MOutCached: 0.08,
- ContextWindow: 200000,
- DefaultMaxTokens: 50000,
- CanReason: false,
- SupportsImages: true,
- },
- },
- },
- {
- Name: "Google Vertex AI",
- ID: provider.InferenceProviderVertexAI,
- Type: provider.TypeVertexAI,
- DefaultLargeModelID: "gemini-2.5-pro",
- DefaultSmallModelID: "gemini-2.5-flash",
- Models: []provider.Model{
- {
- ID: "gemini-2.5-pro",
- Name: "Gemini 2.5 Pro",
- CostPer1MIn: 1.25,
- CostPer1MOut: 10.0,
- CostPer1MInCached: 1.625,
- CostPer1MOutCached: 0.31,
- ContextWindow: 1048576,
- DefaultMaxTokens: 50000,
- CanReason: true,
- SupportsImages: true,
- },
- {
- ID: "gemini-2.5-flash",
- Name: "Gemini 2.5 Flash",
- CostPer1MIn: 0.3,
- CostPer1MOut: 2.5,
- CostPer1MInCached: 0.3833,
- CostPer1MOutCached: 0.075,
- ContextWindow: 1048576,
- DefaultMaxTokens: 50000,
- CanReason: true,
- SupportsImages: true,
- },
- },
- },
- {
- Name: "OpenRouter",
- ID: provider.InferenceProviderOpenRouter,
- APIKey: "$OPENROUTER_API_KEY",
- APIEndpoint: "https://openrouter.ai/api/v1",
- Type: provider.TypeOpenAI,
- DefaultLargeModelID: "anthropic/claude-sonnet-4",
- DefaultSmallModelID: "anthropic/claude-haiku-3.5",
- Models: []provider.Model{
- {
- ID: "anthropic/claude-sonnet-4",
- Name: "Anthropic: Claude Sonnet 4",
- CostPer1MIn: 3.0,
- CostPer1MOut: 15.0,
- CostPer1MInCached: 3.75,
- CostPer1MOutCached: 0.3,
- ContextWindow: 200000,
- DefaultMaxTokens: 32000,
- CanReason: true,
- SupportsImages: true,
- },
- {
- ID: "anthropic/claude-haiku-3.5",
- Name: "Anthropic: Claude 3.5 Haiku",
- CostPer1MIn: 0.8,
- CostPer1MOut: 4.0,
- CostPer1MInCached: 1.0,
- CostPer1MOutCached: 0.08,
- ContextWindow: 200000,
- DefaultMaxTokens: 4096,
- CanReason: false,
- SupportsImages: true,
- },
- },
- },
- }
-}
@@ -1,81 +1,73 @@
package config
import (
+ "encoding/json"
+ "errors"
+ "os"
"testing"
"github.com/charmbracelet/crush/internal/fur/provider"
"github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
)
-func TestProviders_MockEnabled(t *testing.T) {
- originalUseMock := UseMockProviders
- UseMockProviders = true
- defer func() {
- UseMockProviders = originalUseMock
- ResetProviders()
- }()
-
- ResetProviders()
- providers := Providers()
- require.NotEmpty(t, providers)
+// mockProviderClient is a test double for ProviderClient: GetProviders either
+// succeeds with a single stub provider or fails, depending on shouldFail.
+type mockProviderClient struct {
+	shouldFail bool
+}
-	providerIDs := make(map[provider.InferenceProvider]bool)
-	for _, p := range providers {
-		providerIDs[p.ID] = true
+// GetProviders implements ProviderClient for the mock.
+func (m *mockProviderClient) GetProviders() ([]provider.Provider, error) {
+	if m.shouldFail {
+		return nil, errors.New("failed to load providers")
 	}
-
-	assert.True(t, providerIDs[provider.InferenceProviderAnthropic])
-	assert.True(t, providerIDs[provider.InferenceProviderOpenAI])
-	assert.True(t, providerIDs[provider.InferenceProviderGemini])
+	return []provider.Provider{
+		{
+			Name: "Mock",
+		},
+	}, nil
 }
-func TestProviders_ResetFunctionality(t *testing.T) {
- UseMockProviders = true
- defer func() {
- UseMockProviders = false
- ResetProviders()
- }()
-
- providers1 := Providers()
- require.NotEmpty(t, providers1)
-
- ResetProviders()
- providers2 := Providers()
- require.NotEmpty(t, providers2)
+// Happy path: the client succeeds, so its providers are returned and also
+// mirrored to the cache file on disk.
+func TestProvider_loadProvidersNoIssues(t *testing.T) {
+	client := &mockProviderClient{shouldFail: false}
+	tmpPath := t.TempDir() + "/providers.json"
+	providers, err := loadProviders(tmpPath, client)
+	assert.NoError(t, err)
+	assert.NotNil(t, providers)
+	assert.Len(t, providers, 1)
-	assert.Equal(t, len(providers1), len(providers2))
+	// check if file got saved
+	fileInfo, err := os.Stat(tmpPath)
+	assert.NoError(t, err)
+	assert.False(t, fileInfo.IsDir(), "Expected a file, not a directory")
 }
-func TestProviders_ModelCapabilities(t *testing.T) {
- originalUseMock := UseMockProviders
- UseMockProviders = true
- defer func() {
- UseMockProviders = originalUseMock
- ResetProviders()
- }()
-
- ResetProviders()
- providers := Providers()
-
- var openaiProvider provider.Provider
- for _, p := range providers {
- if p.ID == provider.InferenceProviderOpenAI {
- openaiProvider = p
- break
- }
+// When the client fails, loadProviders must fall back to the providers
+// previously cached on disk.
+func TestProvider_loadProvidersWithIssues(t *testing.T) {
+	client := &mockProviderClient{shouldFail: true}
+	tmpPath := t.TempDir() + "/providers.json"
+	// store providers to a temporary file
+	oldProviders := []provider.Provider{
+		{
+			Name: "OldProvider",
+		},
+	}
+	data, err := json.Marshal(oldProviders)
+	if err != nil {
+		t.Fatalf("Failed to marshal old providers: %v", err)
 	}
-	require.NotEmpty(t, openaiProvider.ID)
-	var foundReasoning, foundNonReasoning bool
-	for _, model := range openaiProvider.Models {
-		if model.CanReason && model.HasReasoningEffort {
-			foundReasoning = true
-		} else if !model.CanReason {
-			foundNonReasoning = true
-		}
+	err = os.WriteFile(tmpPath, data, 0o644)
+	if err != nil {
+		t.Fatalf("Failed to write old providers to file: %v", err)
 	}
+	providers, err := loadProviders(tmpPath, client)
+	assert.NoError(t, err)
+	assert.NotNil(t, providers)
+	assert.Len(t, providers, 1)
+	assert.Equal(t, "OldProvider", providers[0].Name, "Expected to keep old provider when loading fails")
+}
- assert.True(t, foundReasoning)
- assert.True(t, foundNonReasoning)
+// When the client fails and there is no cache file to fall back to, the
+// error must propagate and no providers are returned.
+func TestProvider_loadProvidersWithIssuesAndNoCache(t *testing.T) {
+	client := &mockProviderClient{shouldFail: true}
+	tmpPath := t.TempDir() + "/providers.json"
+	providers, err := loadProviders(tmpPath, client)
+	assert.Error(t, err)
+	assert.Nil(t, providers, "Expected nil providers when loading fails and no cache exists")
 }
@@ -0,0 +1,90 @@
+package config
+
+import (
+ "context"
+ "fmt"
+ "strings"
+ "time"
+
+ "github.com/charmbracelet/crush/internal/env"
+ "github.com/charmbracelet/crush/internal/shell"
+)
+
+// VariableResolver resolves configuration values that may reference
+// environment variables (or, for some implementations, shell commands).
+type VariableResolver interface {
+	ResolveValue(value string) (string, error)
+}
+
+// Shell abstracts command execution so resolvers can be tested with a mock.
+type Shell interface {
+	Exec(ctx context.Context, command string) (stdout, stderr string, err error)
+}
+
+// shellVariableResolver resolves "$VAR" via environment lookups and "$(cmd)"
+// by running the command in a shell.
+type shellVariableResolver struct {
+	shell Shell
+	env   env.Env
+}
+
+// NewShellVariableResolver returns a VariableResolver backed by the given
+// environment; its shell is seeded with that same environment so "$(cmd)"
+// expansions see the same variables.
+func NewShellVariableResolver(env env.Env) VariableResolver {
+	return &shellVariableResolver{
+		env: env,
+		shell: shell.NewShell(
+			&shell.Options{
+				Env: env.Env(),
+			},
+		),
+	}
+}
+
+// ResolveValue resolves a configuration value.
+//
+// Values not starting with "$" are returned unchanged. "$(cmd)" values are
+// executed in the shell (with a 30-second timeout) and the trimmed stdout is
+// returned. Any other "$NAME" value is looked up in the environment; an
+// unset or empty variable is an error.
+func (r *shellVariableResolver) ResolveValue(value string) (string, error) {
+	if !strings.HasPrefix(value, "$") {
+		return value, nil
+	}
+
+	if strings.HasPrefix(value, "$(") && strings.HasSuffix(value, ")") {
+		command := strings.TrimSuffix(strings.TrimPrefix(value, "$("), ")")
+		ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
+		defer cancel()
+
+		stdout, _, err := r.shell.Exec(ctx, command)
+		if err != nil {
+			return "", fmt.Errorf("command execution failed: %w", err)
+		}
+		return strings.TrimSpace(stdout), nil
+	}
+
+	// Plain "$NAME": the "$" prefix is guaranteed by the guard above, so the
+	// previous re-check (and its unreachable "invalid value format" branch)
+	// is unnecessary.
+	varName := strings.TrimPrefix(value, "$")
+	value = r.env.Get(varName)
+	if value == "" {
+		return "", fmt.Errorf("environment variable %q not set", varName)
+	}
+	return value, nil
+}
+
+// environmentVariableResolver resolves "$VAR" references using only an
+// environment map; it never executes shell commands.
+type environmentVariableResolver struct {
+	env env.Env
+}
+
+// NewEnvironmentVariableResolver returns a VariableResolver backed solely by
+// the given environment.
+func NewEnvironmentVariableResolver(env env.Env) VariableResolver {
+	return &environmentVariableResolver{
+		env: env,
+	}
+}
+
+// ResolveValue expands a "$NAME" reference using the wrapped environment.
+// Values that do not begin with "$" pass through unchanged; referencing an
+// unset (or empty) variable is an error.
+func (r *environmentVariableResolver) ResolveValue(value string) (string, error) {
+	if !strings.HasPrefix(value, "$") {
+		return value, nil
+	}
+
+	name := value[1:] // safe: the "$" prefix was just confirmed
+	resolved := r.env.Get(name)
+	if resolved == "" {
+		return "", fmt.Errorf("environment variable %q not set", name)
+	}
+	return resolved, nil
+}
@@ -0,0 +1,177 @@
+package config
+
+import (
+ "context"
+ "errors"
+ "testing"
+
+ "github.com/charmbracelet/crush/internal/env"
+ "github.com/stretchr/testify/assert"
+)
+
+// mockShell is a test double for the Shell interface. When execFunc is set
+// it handles Exec calls; otherwise Exec succeeds with empty output.
+type mockShell struct {
+	execFunc func(ctx context.Context, command string) (stdout, stderr string, err error)
+}
+
+func (m *mockShell) Exec(ctx context.Context, command string) (stdout, stderr string, err error) {
+	if m.execFunc == nil {
+		return "", "", nil
+	}
+	return m.execFunc(ctx, command)
+}
+
+// TestShellVariableResolver_ResolveValue exercises every value form the
+// shell resolver accepts: plain strings returned verbatim, "$NAME"
+// environment lookups (missing values are errors), and "$(command)" shell
+// substitution whose stdout is whitespace-trimmed and whose failures are
+// propagated.
+func TestShellVariableResolver_ResolveValue(t *testing.T) {
+	tests := []struct {
+		name        string
+		value       string
+		envVars     map[string]string
+		shellFunc   func(ctx context.Context, command string) (stdout, stderr string, err error)
+		expected    string
+		expectError bool
+	}{
+		{
+			name:     "non-variable string returns as-is",
+			value:    "plain-string",
+			expected: "plain-string",
+		},
+		{
+			name:     "environment variable resolution",
+			value:    "$HOME",
+			envVars:  map[string]string{"HOME": "/home/user"},
+			expected: "/home/user",
+		},
+		{
+			name:        "missing environment variable returns error",
+			value:       "$MISSING_VAR",
+			envVars:     map[string]string{},
+			expectError: true,
+		},
+		{
+			name:  "shell command execution",
+			value: "$(echo hello)",
+			shellFunc: func(ctx context.Context, command string) (stdout, stderr string, err error) {
+				if command == "echo hello" {
+					return "hello\n", "", nil
+				}
+				return "", "", errors.New("unexpected command")
+			},
+			expected: "hello",
+		},
+		// The resolver must trim surrounding whitespace (including the
+		// trailing newline most commands emit) from command output.
+		{
+			name:  "shell command with whitespace trimming",
+			value: "$(echo ' spaced ')",
+			shellFunc: func(ctx context.Context, command string) (stdout, stderr string, err error) {
+				if command == "echo ' spaced '" {
+					return "  spaced  \n", "", nil
+				}
+				return "", "", errors.New("unexpected command")
+			},
+			expected: "spaced",
+		},
+		{
+			name:  "shell command execution error",
+			value: "$(false)",
+			shellFunc: func(ctx context.Context, command string) (stdout, stderr string, err error) {
+				return "", "", errors.New("command failed")
+			},
+			expectError: true,
+		},
+		// A bare "$" has an empty variable name and must be rejected.
+		{
+			name:        "invalid format returns error",
+			value:       "$",
+			expectError: true,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			testEnv := env.NewFromMap(tt.envVars)
+			// Construct the resolver directly (instead of via the public
+			// constructor) so the mock shell can be injected.
+			resolver := &shellVariableResolver{
+				shell: &mockShell{execFunc: tt.shellFunc},
+				env:   testEnv,
+			}
+
+			result, err := resolver.ResolveValue(tt.value)
+
+			if tt.expectError {
+				assert.Error(t, err)
+			} else {
+				assert.NoError(t, err)
+				assert.Equal(t, tt.expected, result)
+			}
+		})
+	}
+}
+
+// TestEnvironmentVariableResolver_ResolveValue verifies the env-only
+// resolver: plain strings pass through, "$NAME" values resolve from the map,
+// and both missing and empty variables are reported as errors.
+func TestEnvironmentVariableResolver_ResolveValue(t *testing.T) {
+	tests := []struct {
+		name        string
+		value       string
+		envVars     map[string]string
+		expected    string
+		expectError bool
+	}{
+		{
+			name:     "non-variable string returns as-is",
+			value:    "plain-string",
+			expected: "plain-string",
+		},
+		{
+			name:     "environment variable resolution",
+			value:    "$HOME",
+			envVars:  map[string]string{"HOME": "/home/user"},
+			expected: "/home/user",
+		},
+		{
+			name:     "environment variable with complex value",
+			value:    "$PATH",
+			envVars:  map[string]string{"PATH": "/usr/bin:/bin:/usr/local/bin"},
+			expected: "/usr/bin:/bin:/usr/local/bin",
+		},
+		{
+			name:        "missing environment variable returns error",
+			value:       "$MISSING_VAR",
+			envVars:     map[string]string{},
+			expectError: true,
+		},
+		// An empty value is indistinguishable from unset and must error too.
+		{
+			name:        "empty environment variable returns error",
+			value:       "$EMPTY_VAR",
+			envVars:     map[string]string{"EMPTY_VAR": ""},
+			expectError: true,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			testEnv := env.NewFromMap(tt.envVars)
+			resolver := NewEnvironmentVariableResolver(testEnv)
+
+			result, err := resolver.ResolveValue(tt.value)
+
+			if tt.expectError {
+				assert.Error(t, err)
+			} else {
+				assert.NoError(t, err)
+				assert.Equal(t, tt.expected, result)
+			}
+		})
+	}
+}
+
+// TestNewShellVariableResolver checks the public constructor yields a
+// non-nil value that satisfies VariableResolver.
+func TestNewShellVariableResolver(t *testing.T) {
+	resolver := NewShellVariableResolver(env.NewFromMap(map[string]string{"TEST": "value"}))
+
+	assert.NotNil(t, resolver)
+	assert.Implements(t, (*VariableResolver)(nil), resolver)
+}
+
+// TestNewEnvironmentVariableResolver checks the public constructor yields a
+// non-nil value that satisfies VariableResolver.
+func TestNewEnvironmentVariableResolver(t *testing.T) {
+	resolver := NewEnvironmentVariableResolver(env.NewFromMap(map[string]string{"TEST": "value"}))
+
+	assert.NotNil(t, resolver)
+	assert.Implements(t, (*VariableResolver)(nil), resolver)
+}
@@ -1,73 +0,0 @@
-package config
-
-import (
- "context"
- "fmt"
- "os"
- "strings"
- "time"
-
- "github.com/charmbracelet/crush/internal/logging"
- "github.com/charmbracelet/crush/internal/shell"
-)
-
-// ExecuteCommand executes a shell command and returns the output
-// This is a shared utility that can be used by both provider config and tools
-func ExecuteCommand(ctx context.Context, command string, workingDir string) (string, error) {
- if workingDir == "" {
- workingDir = WorkingDirectory()
- }
-
- persistentShell := shell.NewShell(&shell.Options{WorkingDir: workingDir})
-
- stdout, stderr, err := persistentShell.Exec(ctx, command)
- if err != nil {
- logging.Debug("Command execution failed", "command", command, "error", err, "stderr", stderr)
- return "", fmt.Errorf("command execution failed: %w", err)
- }
-
- return strings.TrimSpace(stdout), nil
-}
-
-// ResolveAPIKey resolves an API key that can be either:
-// - A direct string value
-// - An environment variable (prefixed with $)
-// - A shell command (wrapped in $(...))
-func ResolveAPIKey(apiKey string) (string, error) {
- if !strings.HasPrefix(apiKey, "$") {
- return apiKey, nil
- }
-
- if strings.HasPrefix(apiKey, "$(") && strings.HasSuffix(apiKey, ")") {
- command := strings.TrimSuffix(strings.TrimPrefix(apiKey, "$("), ")")
- logging.Debug("Resolving API key from command", "command", command)
- return resolveCommandAPIKey(command)
- }
-
- envVar := strings.TrimPrefix(apiKey, "$")
- if value := os.Getenv(envVar); value != "" {
- logging.Debug("Resolved environment variable", "envVar", envVar, "value", value)
- return value, nil
- }
-
- logging.Debug("Environment variable not found", "envVar", envVar)
-
- return "", fmt.Errorf("environment variable %s not found", envVar)
-}
-
-// resolveCommandAPIKey executes a command to get an API key, with caching support
-func resolveCommandAPIKey(command string) (string, error) {
- ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
- defer cancel()
-
- logging.Debug("Executing command for API key", "command", command)
-
- workingDir := WorkingDirectory()
-
- result, err := ExecuteCommand(ctx, command, workingDir)
- if err != nil {
- return "", fmt.Errorf("failed to execute API key command: %w", err)
- }
- logging.Debug("Command executed successfully", "command", command, "result", result)
- return result, nil
-}
@@ -1,462 +0,0 @@
-package config
-
-import (
- "testing"
-
- "github.com/charmbracelet/crush/internal/fur/provider"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-func TestConfig_Validate_ValidConfig(t *testing.T) {
- cfg := &Config{
- Models: PreferredModels{
- Large: PreferredModel{
- ModelID: "gpt-4",
- Provider: provider.InferenceProviderOpenAI,
- },
- Small: PreferredModel{
- ModelID: "gpt-3.5-turbo",
- Provider: provider.InferenceProviderOpenAI,
- },
- },
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "test-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "gpt-4",
- DefaultSmallModel: "gpt-3.5-turbo",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- CostPer1MIn: 30.0,
- CostPer1MOut: 60.0,
- },
- {
- ID: "gpt-3.5-turbo",
- Name: "GPT-3.5 Turbo",
- ContextWindow: 4096,
- DefaultMaxTokens: 2048,
- CostPer1MIn: 1.5,
- CostPer1MOut: 2.0,
- },
- },
- },
- },
- Agents: map[AgentID]Agent{
- AgentCoder: {
- ID: AgentCoder,
- Name: "Coder",
- Description: "An agent that helps with executing coding tasks.",
- Model: LargeModel,
- ContextPaths: []string{"CRUSH.md"},
- },
- AgentTask: {
- ID: AgentTask,
- Name: "Task",
- Description: "An agent that helps with searching for context and finding implementation details.",
- Model: LargeModel,
- ContextPaths: []string{"CRUSH.md"},
- AllowedTools: []string{"glob", "grep", "ls", "sourcegraph", "view"},
- AllowedMCP: map[string][]string{},
- AllowedLSP: []string{},
- },
- },
- MCP: map[string]MCP{},
- LSP: map[string]LSPConfig{},
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- assert.NoError(t, err)
-}
-
-func TestConfig_Validate_MissingAPIKey(t *testing.T) {
- cfg := &Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- ProviderType: provider.TypeOpenAI,
- // Missing APIKey
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "API key is required")
-}
-
-func TestConfig_Validate_InvalidProviderType(t *testing.T) {
- cfg := &Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "test-key",
- ProviderType: provider.Type("invalid"),
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "invalid provider type")
-}
-
-func TestConfig_Validate_CustomProviderMissingBaseURL(t *testing.T) {
- customProvider := provider.InferenceProvider("custom-provider")
- cfg := &Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- customProvider: {
- ID: customProvider,
- APIKey: "test-key",
- ProviderType: provider.TypeOpenAI,
- // Missing BaseURL for custom provider
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "BaseURL is required for custom providers")
-}
-
-func TestConfig_Validate_DuplicateModelIDs(t *testing.T) {
- cfg := &Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "test-key",
- ProviderType: provider.TypeOpenAI,
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- {
- ID: "gpt-4", // Duplicate ID
- Name: "GPT-4 Duplicate",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "duplicate model ID")
-}
-
-func TestConfig_Validate_InvalidModelFields(t *testing.T) {
- cfg := &Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "test-key",
- ProviderType: provider.TypeOpenAI,
- Models: []Model{
- {
- ID: "", // Empty ID
- Name: "GPT-4",
- ContextWindow: 0, // Invalid context window
- DefaultMaxTokens: -1, // Invalid max tokens
- CostPer1MIn: -5.0, // Negative cost
- },
- },
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- validationErr := err.(ValidationErrors)
- assert.True(t, len(validationErr) >= 4) // Should have multiple validation errors
-}
-
-func TestConfig_Validate_DefaultModelNotFound(t *testing.T) {
- cfg := &Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "test-key",
- ProviderType: provider.TypeOpenAI,
- DefaultLargeModel: "nonexistent-model",
- Models: []Model{
- {
- ID: "gpt-4",
- Name: "GPT-4",
- ContextWindow: 8192,
- DefaultMaxTokens: 4096,
- },
- },
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "default large model 'nonexistent-model' not found")
-}
-
-func TestConfig_Validate_AgentIDMismatch(t *testing.T) {
- cfg := &Config{
- Agents: map[AgentID]Agent{
- AgentCoder: {
- ID: AgentTask, // Wrong ID
- Name: "Coder",
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "agent ID mismatch")
-}
-
-func TestConfig_Validate_InvalidAgentModelType(t *testing.T) {
- cfg := &Config{
- Agents: map[AgentID]Agent{
- AgentCoder: {
- ID: AgentCoder,
- Name: "Coder",
- Model: ModelType("invalid"),
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "invalid model type")
-}
-
-func TestConfig_Validate_UnknownTool(t *testing.T) {
- cfg := &Config{
- Agents: map[AgentID]Agent{
- AgentID("custom-agent"): {
- ID: AgentID("custom-agent"),
- Name: "Custom Agent",
- Model: LargeModel,
- AllowedTools: []string{"unknown-tool"},
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "unknown tool")
-}
-
-func TestConfig_Validate_MCPReference(t *testing.T) {
- cfg := &Config{
- Agents: map[AgentID]Agent{
- AgentID("custom-agent"): {
- ID: AgentID("custom-agent"),
- Name: "Custom Agent",
- Model: LargeModel,
- AllowedMCP: map[string][]string{"nonexistent-mcp": nil},
- },
- },
- MCP: map[string]MCP{}, // Empty MCP map
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "referenced MCP 'nonexistent-mcp' not found")
-}
-
-func TestConfig_Validate_InvalidMCPType(t *testing.T) {
- cfg := &Config{
- MCP: map[string]MCP{
- "test-mcp": {
- Type: MCPType("invalid"),
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "invalid MCP type")
-}
-
-func TestConfig_Validate_MCPMissingCommand(t *testing.T) {
- cfg := &Config{
- MCP: map[string]MCP{
- "test-mcp": {
- Type: MCPStdio,
- // Missing Command
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "command is required for stdio MCP")
-}
-
-func TestConfig_Validate_LSPMissingCommand(t *testing.T) {
- cfg := &Config{
- LSP: map[string]LSPConfig{
- "test-lsp": {
- // Missing Command
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "command is required for LSP")
-}
-
-func TestConfig_Validate_NoValidProviders(t *testing.T) {
- cfg := &Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "test-key",
- ProviderType: provider.TypeOpenAI,
- Disabled: true, // Disabled
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "at least one non-disabled provider is required")
-}
-
-func TestConfig_Validate_MissingDefaultAgents(t *testing.T) {
- cfg := &Config{
- Providers: map[provider.InferenceProvider]ProviderConfig{
- provider.InferenceProviderOpenAI: {
- ID: provider.InferenceProviderOpenAI,
- APIKey: "test-key",
- ProviderType: provider.TypeOpenAI,
- },
- },
- Agents: map[AgentID]Agent{}, // Missing default agents
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "coder agent is required")
- assert.Contains(t, err.Error(), "task agent is required")
-}
-
-func TestConfig_Validate_KnownAgentProtection(t *testing.T) {
- cfg := &Config{
- Agents: map[AgentID]Agent{
- AgentCoder: {
- ID: AgentCoder,
- Name: "Modified Coder", // Should not be allowed
- Description: "Modified description", // Should not be allowed
- Model: LargeModel,
- },
- },
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "coder agent name cannot be changed")
- assert.Contains(t, err.Error(), "coder agent description cannot be changed")
-}
-
-func TestConfig_Validate_EmptyDataDirectory(t *testing.T) {
- cfg := &Config{
- Options: Options{
- DataDirectory: "", // Empty
- ContextPaths: []string{"CRUSH.md"},
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "data directory is required")
-}
-
-func TestConfig_Validate_EmptyContextPath(t *testing.T) {
- cfg := &Config{
- Options: Options{
- DataDirectory: ".crush",
- ContextPaths: []string{""}, // Empty context path
- },
- }
-
- err := cfg.Validate()
- require.Error(t, err)
- assert.Contains(t, err.Error(), "context path cannot be empty")
-}
@@ -4,20 +4,17 @@ import (
"context"
"database/sql"
"fmt"
+ "log/slog"
"os"
"path/filepath"
_ "github.com/ncruces/go-sqlite3/driver"
_ "github.com/ncruces/go-sqlite3/embed"
- "github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/logging"
-
"github.com/pressly/goose/v3"
)
-func Connect(ctx context.Context) (*sql.DB, error) {
- dataDir := config.Get().Options.DataDirectory
+func Connect(ctx context.Context, dataDir string) (*sql.DB, error) {
if dataDir == "" {
return nil, fmt.Errorf("data.dir is not set")
}
@@ -48,21 +45,21 @@ func Connect(ctx context.Context) (*sql.DB, error) {
for _, pragma := range pragmas {
if _, err = db.ExecContext(ctx, pragma); err != nil {
- logging.Error("Failed to set pragma", pragma, err)
+ slog.Error("Failed to set pragma", pragma, err)
} else {
- logging.Debug("Set pragma", "pragma", pragma)
+ slog.Debug("Set pragma", "pragma", pragma)
}
}
goose.SetBaseFS(FS)
if err := goose.SetDialect("sqlite3"); err != nil {
- logging.Error("Failed to set dialect", "error", err)
+ slog.Error("Failed to set dialect", "error", err)
return nil, fmt.Errorf("failed to set dialect: %w", err)
}
if err := goose.Up(db, "migrations"); err != nil {
- logging.Error("Failed to apply migrations", "error", err)
+ slog.Error("Failed to apply migrations", "error", err)
return nil, fmt.Errorf("failed to apply migrations: %w", err)
}
return db, nil
@@ -4,15 +4,10 @@ import (
"strings"
"github.com/aymanbagabas/go-udiff"
- "github.com/charmbracelet/crush/internal/config"
)
// GenerateDiff creates a unified diff from two file contents
func GenerateDiff(beforeContent, afterContent, fileName string) (string, int, int) {
- // remove the cwd prefix and ensure consistent path format
- // this prevents issues with absolute paths in different environments
- cwd := config.WorkingDirectory()
- fileName = strings.TrimPrefix(fileName, cwd)
fileName = strings.TrimPrefix(fileName, "/")
var (
@@ -0,0 +1,58 @@
+package env
+
+import "os"
+
+// Env abstracts environment-variable access so callers can swap the real
+// process environment for a fixed map in tests.
+type Env interface {
+	Get(key string) string
+	Env() []string
+}
+
+// osEnv is the Env implementation backed by the real process environment.
+type osEnv struct{}
+
+// Get implements Env.
+func (o *osEnv) Get(key string) string {
+	return os.Getenv(key)
+}
+
+// Env implements Env, returning os.Environ() or nil when it is empty.
+func (o *osEnv) Env() []string {
+	if vars := os.Environ(); len(vars) > 0 {
+		return vars
+	}
+	return nil
+}
+
+// New returns an Env backed by the process environment.
+func New() Env {
+	return &osEnv{}
+}
+
+// mapEnv is an Env backed by an in-memory map, used to supply a fixed
+// environment (primarily in tests).
+type mapEnv struct {
+	m map[string]string
+}
+
+// Get implements Env. Absent keys yield the empty string, matching the map
+// zero value.
+func (m *mapEnv) Get(key string) string {
+	return m.m[key]
+}
+
+// Env implements Env, rendering the map as "key=value" pairs. Order is
+// unspecified; an empty map yields nil.
+func (m *mapEnv) Env() []string {
+	if len(m.m) == 0 {
+		return nil
+	}
+	vars := make([]string, 0, len(m.m))
+	for key, value := range m.m {
+		vars = append(vars, key+"="+value)
+	}
+	return vars
+}
+
+// NewFromMap returns an Env backed by m. A nil map is treated as empty.
+func NewFromMap(m map[string]string) Env {
+	if m == nil {
+		m = make(map[string]string)
+	}
+	return &mapEnv{m: m}
+}
@@ -0,0 +1,142 @@
+package env
+
+import (
+ "os"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestOsEnv_Get verifies that the process-backed Env returns set variables
+// verbatim and the empty string for unset ones.
+func TestOsEnv_Get(t *testing.T) {
+	env := New()
+
+	os.Setenv("TEST_VAR", "test_value")
+	defer os.Unsetenv("TEST_VAR")
+
+	assert.Equal(t, "test_value", env.Get("TEST_VAR"))
+	assert.Equal(t, "", env.Get("NON_EXISTENT_VAR"))
+}
+
+// TestOsEnv_Env verifies that the process environment snapshot is non-empty
+// and that every entry is in "key=value" form.
+func TestOsEnv_Env(t *testing.T) {
+	envVars := New().Env()
+
+	assert.NotNil(t, envVars)
+	assert.Greater(t, len(envVars), 0)
+
+	for _, entry := range envVars {
+		assert.Contains(t, entry, "=")
+	}
+}
+
+// TestNewFromMap checks that the constructor returns the map-backed
+// implementation.
+func TestNewFromMap(t *testing.T) {
+	env := NewFromMap(map[string]string{"KEY1": "value1", "KEY2": "value2"})
+
+	assert.NotNil(t, env)
+	assert.IsType(t, &mapEnv{}, env)
+}
+
+// TestMapEnv_Get verifies lookups against a map-backed Env: present keys
+// return their values, absent keys return "".
+func TestMapEnv_Get(t *testing.T) {
+	env := NewFromMap(map[string]string{
+		"KEY1": "value1",
+		"KEY2": "value2",
+	})
+
+	expectations := map[string]string{
+		"KEY1":         "value1",
+		"KEY2":         "value2",
+		"NON_EXISTENT": "",
+	}
+	for key, want := range expectations {
+		assert.Equal(t, want, env.Get(key))
+	}
+}
+
+// TestMapEnv_Env covers the three states of the map-backed Env() snapshot:
+// a populated map yields "key=value" entries (order unspecified), while
+// empty and nil maps both yield nil.
+func TestMapEnv_Env(t *testing.T) {
+	t.Run("with values", func(t *testing.T) {
+		testMap := map[string]string{
+			"KEY1": "value1",
+			"KEY2": "value2",
+		}
+
+		env := NewFromMap(testMap)
+		envVars := env.Env()
+
+		assert.Len(t, envVars, 2)
+
+		// Convert to map for easier testing (order is not guaranteed)
+		envMap := make(map[string]string)
+		for _, envVar := range envVars {
+			// SplitN with n=2 keeps any "=" inside the value intact.
+			parts := strings.SplitN(envVar, "=", 2)
+			assert.Len(t, parts, 2)
+			envMap[parts[0]] = parts[1]
+		}
+
+		assert.Equal(t, "value1", envMap["KEY1"])
+		assert.Equal(t, "value2", envMap["KEY2"])
+	})
+
+	t.Run("empty map", func(t *testing.T) {
+		env := NewFromMap(map[string]string{})
+		envVars := env.Env()
+		assert.Nil(t, envVars)
+	})
+
+	t.Run("nil map", func(t *testing.T) {
+		env := NewFromMap(nil)
+		envVars := env.Env()
+		assert.Nil(t, envVars)
+	})
+}
+
+// TestMapEnv_GetEmptyValue confirms a key mapped to "" returns the stored
+// empty string, just like a missing key would.
+func TestMapEnv_GetEmptyValue(t *testing.T) {
+	env := NewFromMap(map[string]string{
+		"EMPTY_KEY":  "",
+		"NORMAL_KEY": "value",
+	})
+
+	assert.Equal(t, "", env.Get("EMPTY_KEY"))
+	assert.Equal(t, "value", env.Get("NORMAL_KEY"))
+}
+
+// TestMapEnv_EnvFormat checks that Env() entries are well-formed even when
+// values themselves contain "=" or spaces: only the first "=" separates the
+// key from the value.
+func TestMapEnv_EnvFormat(t *testing.T) {
+	testMap := map[string]string{
+		"KEY_WITH_EQUALS": "value=with=equals",
+		"KEY_WITH_SPACES": "value with spaces",
+	}
+
+	env := NewFromMap(testMap)
+	envVars := env.Env()
+
+	assert.Len(t, envVars, 2)
+
+	// Check that the format is correct even with special characters
+	found := make(map[string]bool)
+	for _, envVar := range envVars {
+		if envVar == "KEY_WITH_EQUALS=value=with=equals" {
+			found["equals"] = true
+		}
+		if envVar == "KEY_WITH_SPACES=value with spaces" {
+			found["spaces"] = true
+		}
+	}
+
+	assert.True(t, found["equals"], "Should handle values with equals signs")
+	assert.True(t, found["spaces"], "Should handle values with spaces")
+}
@@ -2,6 +2,7 @@ package fsext
import (
"fmt"
+ "log/slog"
"os"
"os/exec"
"path/filepath"
@@ -11,7 +12,7 @@ import (
"github.com/bmatcuk/doublestar/v4"
"github.com/charlievieth/fastwalk"
- "github.com/charmbracelet/crush/internal/logging"
+
ignore "github.com/sabhiram/go-gitignore"
)
@@ -24,11 +25,11 @@ func init() {
var err error
rgPath, err = exec.LookPath("rg")
if err != nil {
- logging.Warn("Ripgrep (rg) not found in $PATH. Some features might be limited or slower.")
+ slog.Warn("Ripgrep (rg) not found in $PATH. Some features might be limited or slower.")
}
fzfPath, err = exec.LookPath("fzf")
if err != nil {
- logging.Warn("FZF not found in $PATH. Some features might be limited or slower.")
+ slog.Warn("FZF not found in $PATH. Some features might be limited or slower.")
}
}
@@ -45,7 +45,7 @@ type Provider struct {
// Model represents an AI model configuration.
type Model struct {
ID string `json:"id"`
- Name string `json:"model"`
+ Model string `json:"model"`
CostPer1MIn float64 `json:"cost_per_1m_in"`
CostPer1MOut float64 `json:"cost_per_1m_out"`
CostPer1MInCached float64 `json:"cost_per_1m_in_cached"`
@@ -4,17 +4,19 @@ import (
"context"
"errors"
"fmt"
+ "log/slog"
"slices"
"strings"
"sync"
"time"
"github.com/charmbracelet/crush/internal/config"
+ fur "github.com/charmbracelet/crush/internal/fur/provider"
"github.com/charmbracelet/crush/internal/history"
"github.com/charmbracelet/crush/internal/llm/prompt"
"github.com/charmbracelet/crush/internal/llm/provider"
"github.com/charmbracelet/crush/internal/llm/tools"
- "github.com/charmbracelet/crush/internal/logging"
+ "github.com/charmbracelet/crush/internal/log"
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/permission"
@@ -49,7 +51,7 @@ type AgentEvent struct {
type Service interface {
pubsub.Suscriber[AgentEvent]
- Model() config.Model
+ Model() fur.Model
Run(ctx context.Context, sessionID string, content string, attachments ...message.Attachment) (<-chan AgentEvent, error)
Cancel(sessionID string)
CancelAll()
@@ -76,9 +78,9 @@ type agent struct {
activeRequests sync.Map
}
-var agentPromptMap = map[config.AgentID]prompt.PromptID{
- config.AgentCoder: prompt.PromptCoder,
- config.AgentTask: prompt.PromptTask,
+var agentPromptMap = map[string]prompt.PromptID{
+ "coder": prompt.PromptCoder,
+ "task": prompt.PromptTask,
}
func NewAgent(
@@ -92,25 +94,26 @@ func NewAgent(
) (Service, error) {
ctx := context.Background()
cfg := config.Get()
- otherTools := GetMcpTools(ctx, permissions)
+ otherTools := GetMcpTools(ctx, permissions, cfg)
if len(lspClients) > 0 {
otherTools = append(otherTools, tools.NewDiagnosticsTool(lspClients))
}
+ cwd := cfg.WorkingDir()
allTools := []tools.BaseTool{
- tools.NewBashTool(permissions),
- tools.NewEditTool(lspClients, permissions, history),
- tools.NewFetchTool(permissions),
- tools.NewGlobTool(),
- tools.NewGrepTool(),
- tools.NewLsTool(),
+ tools.NewBashTool(permissions, cwd),
+ tools.NewEditTool(lspClients, permissions, history, cwd),
+ tools.NewFetchTool(permissions, cwd),
+ tools.NewGlobTool(cwd),
+ tools.NewGrepTool(cwd),
+ tools.NewLsTool(cwd),
tools.NewSourcegraphTool(),
- tools.NewViewTool(lspClients),
- tools.NewWriteTool(lspClients, permissions, history),
+ tools.NewViewTool(lspClients, cwd),
+ tools.NewWriteTool(lspClients, permissions, history, cwd),
}
- if agentCfg.ID == config.AgentCoder {
- taskAgentCfg := config.Get().Agents[config.AgentTask]
+ if agentCfg.ID == "coder" {
+ taskAgentCfg := config.Get().Agents["task"]
if taskAgentCfg.ID == "" {
return nil, fmt.Errorf("task agent not found in config")
}
@@ -130,13 +133,13 @@ func NewAgent(
}
allTools = append(allTools, otherTools...)
- providerCfg := config.GetAgentProvider(agentCfg.ID)
- if providerCfg.ID == "" {
+ providerCfg := config.Get().GetProviderForModel(agentCfg.Model)
+ if providerCfg == nil {
return nil, fmt.Errorf("provider for agent %s not found in config", agentCfg.Name)
}
- model := config.GetAgentModel(agentCfg.ID)
+ model := config.Get().GetModelByType(agentCfg.Model)
- if model.ID == "" {
+ if model == nil {
return nil, fmt.Errorf("model not found for agent %s", agentCfg.Name)
}
@@ -148,51 +151,40 @@ func NewAgent(
provider.WithModel(agentCfg.Model),
provider.WithSystemMessage(prompt.GetPrompt(promptID, providerCfg.ID)),
}
- agentProvider, err := provider.NewProvider(providerCfg, opts...)
+ agentProvider, err := provider.NewProvider(*providerCfg, opts...)
if err != nil {
return nil, err
}
- smallModelCfg := cfg.Models.Small
- var smallModel config.Model
-
- var smallModelProviderCfg config.ProviderConfig
+ smallModelCfg := cfg.Models[config.SelectedModelTypeSmall]
+ var smallModelProviderCfg *config.ProviderConfig
if smallModelCfg.Provider == providerCfg.ID {
smallModelProviderCfg = providerCfg
} else {
- for _, p := range cfg.Providers {
- if p.ID == smallModelCfg.Provider {
- smallModelProviderCfg = p
- break
- }
- }
+ smallModelProviderCfg = cfg.GetProviderForModel(config.SelectedModelTypeSmall)
+
if smallModelProviderCfg.ID == "" {
return nil, fmt.Errorf("provider %s not found in config", smallModelCfg.Provider)
}
}
- for _, m := range smallModelProviderCfg.Models {
- if m.ID == smallModelCfg.ModelID {
- smallModel = m
- break
- }
- }
+ smallModel := cfg.GetModelByType(config.SelectedModelTypeSmall)
if smallModel.ID == "" {
- return nil, fmt.Errorf("model %s not found in provider %s", smallModelCfg.ModelID, smallModelProviderCfg.ID)
+ return nil, fmt.Errorf("model %s not found in provider %s", smallModelCfg.Model, smallModelProviderCfg.ID)
}
titleOpts := []provider.ProviderClientOption{
- provider.WithModel(config.SmallModel),
+ provider.WithModel(config.SelectedModelTypeSmall),
provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptTitle, smallModelProviderCfg.ID)),
}
- titleProvider, err := provider.NewProvider(smallModelProviderCfg, titleOpts...)
+ titleProvider, err := provider.NewProvider(*smallModelProviderCfg, titleOpts...)
if err != nil {
return nil, err
}
summarizeOpts := []provider.ProviderClientOption{
- provider.WithModel(config.SmallModel),
+ provider.WithModel(config.SelectedModelTypeSmall),
provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, smallModelProviderCfg.ID)),
}
- summarizeProvider, err := provider.NewProvider(smallModelProviderCfg, summarizeOpts...)
+ summarizeProvider, err := provider.NewProvider(*smallModelProviderCfg, summarizeOpts...)
if err != nil {
return nil, err
}
@@ -225,15 +217,15 @@ func NewAgent(
return agent, nil
}
-func (a *agent) Model() config.Model {
- return config.GetAgentModel(a.agentCfg.ID)
+func (a *agent) Model() fur.Model {
+ return *config.Get().GetModelByType(a.agentCfg.Model)
}
func (a *agent) Cancel(sessionID string) {
// Cancel regular requests
if cancelFunc, exists := a.activeRequests.LoadAndDelete(sessionID); exists {
if cancel, ok := cancelFunc.(context.CancelFunc); ok {
- logging.InfoPersist(fmt.Sprintf("Request cancellation initiated for session: %s", sessionID))
+ slog.Info(fmt.Sprintf("Request cancellation initiated for session: %s", sessionID))
cancel()
}
}
@@ -241,7 +233,7 @@ func (a *agent) Cancel(sessionID string) {
// Also check for summarize requests
if cancelFunc, exists := a.activeRequests.LoadAndDelete(sessionID + "-summarize"); exists {
if cancel, ok := cancelFunc.(context.CancelFunc); ok {
- logging.InfoPersist(fmt.Sprintf("Summarize cancellation initiated for session: %s", sessionID))
+ slog.Info(fmt.Sprintf("Summarize cancellation initiated for session: %s", sessionID))
cancel()
}
}
@@ -335,8 +327,8 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
a.activeRequests.Store(sessionID, cancel)
go func() {
- logging.Debug("Request started", "sessionID", sessionID)
- defer logging.RecoverPanic("agent.Run", func() {
+ slog.Debug("Request started", "sessionID", sessionID)
+ defer log.RecoverPanic("agent.Run", func() {
events <- a.err(fmt.Errorf("panic while running the agent"))
})
var attachmentParts []message.ContentPart
@@ -345,9 +337,9 @@ func (a *agent) Run(ctx context.Context, sessionID string, content string, attac
}
result := a.processGeneration(genCtx, sessionID, content, attachmentParts)
if result.Error != nil && !errors.Is(result.Error, ErrRequestCancelled) && !errors.Is(result.Error, context.Canceled) {
- logging.ErrorPersist(result.Error.Error())
+ slog.Error(result.Error.Error())
}
- logging.Debug("Request completed", "sessionID", sessionID)
+ slog.Debug("Request completed", "sessionID", sessionID)
a.activeRequests.Delete(sessionID)
cancel()
a.Publish(pubsub.CreatedEvent, result)
@@ -366,12 +358,12 @@ func (a *agent) processGeneration(ctx context.Context, sessionID, content string
}
if len(msgs) == 0 {
go func() {
- defer logging.RecoverPanic("agent.Run", func() {
- logging.ErrorPersist("panic while generating title")
+ defer log.RecoverPanic("agent.Run", func() {
+ slog.Error("panic while generating title")
})
titleErr := a.generateTitle(context.Background(), sessionID, content)
if titleErr != nil && !errors.Is(titleErr, context.Canceled) && !errors.Is(titleErr, context.DeadlineExceeded) {
- logging.ErrorPersist(fmt.Sprintf("failed to generate title: %v", titleErr))
+ slog.Error(fmt.Sprintf("failed to generate title: %v", titleErr))
}
}()
}
@@ -418,11 +410,7 @@ func (a *agent) processGeneration(ctx context.Context, sessionID, content string
return a.err(fmt.Errorf("failed to process events: %w", err))
}
if cfg.Options.Debug {
- seqId := (len(msgHistory) + 1) / 2
- toolResultFilepath := logging.WriteToolResultsJson(sessionID, seqId, toolResults)
- logging.Info("Result", "message", agentMessage.FinishReason(), "toolResults", "{}", "filepath", toolResultFilepath)
- } else {
- logging.Info("Result", "message", agentMessage.FinishReason(), "toolResults", toolResults)
+ slog.Info("Result", "message", agentMessage.FinishReason(), "toolResults", toolResults)
}
if (agentMessage.FinishReason() == message.FinishReasonToolUse) && toolResults != nil {
// We are not done, we need to respond with the tool response
@@ -581,22 +569,22 @@ func (a *agent) processEvent(ctx context.Context, sessionID string, assistantMsg
assistantMsg.AppendContent(event.Content)
return a.messages.Update(ctx, *assistantMsg)
case provider.EventToolUseStart:
- logging.Info("Tool call started", "toolCall", event.ToolCall)
+ slog.Info("Tool call started", "toolCall", event.ToolCall)
assistantMsg.AddToolCall(*event.ToolCall)
return a.messages.Update(ctx, *assistantMsg)
case provider.EventToolUseDelta:
assistantMsg.AppendToolCallInput(event.ToolCall.ID, event.ToolCall.Input)
return a.messages.Update(ctx, *assistantMsg)
case provider.EventToolUseStop:
- logging.Info("Finished tool call", "toolCall", event.ToolCall)
+ slog.Info("Finished tool call", "toolCall", event.ToolCall)
assistantMsg.FinishToolCall(event.ToolCall.ID)
return a.messages.Update(ctx, *assistantMsg)
case provider.EventError:
if errors.Is(event.Error, context.Canceled) {
- logging.InfoPersist(fmt.Sprintf("Event processing canceled for session: %s", sessionID))
+ slog.Info(fmt.Sprintf("Event processing canceled for session: %s", sessionID))
return context.Canceled
}
- logging.ErrorPersist(event.Error.Error())
+ slog.Error(event.Error.Error())
return event.Error
case provider.EventComplete:
assistantMsg.SetToolCalls(event.Response.ToolCalls)
@@ -610,7 +598,7 @@ func (a *agent) processEvent(ctx context.Context, sessionID string, assistantMsg
return nil
}
-func (a *agent) TrackUsage(ctx context.Context, sessionID string, model config.Model, usage provider.TokenUsage) error {
+func (a *agent) TrackUsage(ctx context.Context, sessionID string, model fur.Model, usage provider.TokenUsage) error {
sess, err := a.sessions.Get(ctx, sessionID)
if err != nil {
return fmt.Errorf("failed to get session: %w", err)
@@ -819,7 +807,7 @@ func (a *agent) UpdateModel() error {
cfg := config.Get()
// Get current provider configuration
- currentProviderCfg := config.GetAgentProvider(a.agentCfg.ID)
+ currentProviderCfg := cfg.GetProviderForModel(a.agentCfg.Model)
if currentProviderCfg.ID == "" {
return fmt.Errorf("provider for agent %s not found in config", a.agentCfg.Name)
}
@@ -827,7 +815,7 @@ func (a *agent) UpdateModel() error {
// Check if provider has changed
if string(currentProviderCfg.ID) != a.providerID {
// Provider changed, need to recreate the main provider
- model := config.GetAgentModel(a.agentCfg.ID)
+ model := cfg.GetModelByType(a.agentCfg.Model)
if model.ID == "" {
return fmt.Errorf("model not found for agent %s", a.agentCfg.Name)
}
@@ -842,7 +830,7 @@ func (a *agent) UpdateModel() error {
provider.WithSystemMessage(prompt.GetPrompt(promptID, currentProviderCfg.ID)),
}
- newProvider, err := provider.NewProvider(currentProviderCfg, opts...)
+ newProvider, err := provider.NewProvider(*currentProviderCfg, opts...)
if err != nil {
return fmt.Errorf("failed to create new provider: %w", err)
}
@@ -853,7 +841,7 @@ func (a *agent) UpdateModel() error {
}
// Check if small model provider has changed (affects title and summarize providers)
- smallModelCfg := cfg.Models.Small
+ smallModelCfg := cfg.Models[config.SelectedModelTypeSmall]
var smallModelProviderCfg config.ProviderConfig
for _, p := range cfg.Providers {
@@ -869,20 +857,14 @@ func (a *agent) UpdateModel() error {
// Check if summarize provider has changed
if string(smallModelProviderCfg.ID) != a.summarizeProviderID {
- var smallModel config.Model
- for _, m := range smallModelProviderCfg.Models {
- if m.ID == smallModelCfg.ModelID {
- smallModel = m
- break
- }
- }
- if smallModel.ID == "" {
- return fmt.Errorf("model %s not found in provider %s", smallModelCfg.ModelID, smallModelProviderCfg.ID)
+ smallModel := cfg.GetModelByType(config.SelectedModelTypeSmall)
+ if smallModel == nil {
+ return fmt.Errorf("model %s not found in provider %s", smallModelCfg.Model, smallModelProviderCfg.ID)
}
// Recreate title provider
titleOpts := []provider.ProviderClientOption{
- provider.WithModel(config.SmallModel),
+ provider.WithModel(config.SelectedModelTypeSmall),
provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptTitle, smallModelProviderCfg.ID)),
// We want the title to be short, so we limit the max tokens
provider.WithMaxTokens(40),
@@ -894,7 +876,7 @@ func (a *agent) UpdateModel() error {
// Recreate summarize provider
summarizeOpts := []provider.ProviderClientOption{
- provider.WithModel(config.SmallModel),
+ provider.WithModel(config.SelectedModelTypeSmall),
provider.WithSystemMessage(prompt.GetPrompt(prompt.PromptSummarizer, smallModelProviderCfg.ID)),
}
newSummarizeProvider, err := provider.NewProvider(smallModelProviderCfg, summarizeOpts...)
@@ -4,10 +4,11 @@ import (
"context"
"encoding/json"
"fmt"
+ "log/slog"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/llm/tools"
- "github.com/charmbracelet/crush/internal/logging"
+
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/version"
@@ -19,8 +20,9 @@ import (
type mcpTool struct {
mcpName string
tool mcp.Tool
- mcpConfig config.MCP
+ mcpConfig config.MCPConfig
permissions permission.Service
+ workingDir string
}
type MCPClient interface {
@@ -97,7 +99,7 @@ func (b *mcpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolRes
p := b.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
- Path: config.WorkingDirectory(),
+ Path: b.workingDir,
ToolName: b.Info().Name,
Action: "execute",
Description: permissionDescription,
@@ -142,18 +144,19 @@ func (b *mcpTool) Run(ctx context.Context, params tools.ToolCall) (tools.ToolRes
return tools.NewTextErrorResponse("invalid mcp type"), nil
}
-func NewMcpTool(name string, tool mcp.Tool, permissions permission.Service, mcpConfig config.MCP) tools.BaseTool {
+func NewMcpTool(name string, tool mcp.Tool, permissions permission.Service, mcpConfig config.MCPConfig, workingDir string) tools.BaseTool {
return &mcpTool{
mcpName: name,
tool: tool,
mcpConfig: mcpConfig,
permissions: permissions,
+ workingDir: workingDir,
}
}
var mcpTools []tools.BaseTool
-func getTools(ctx context.Context, name string, m config.MCP, permissions permission.Service, c MCPClient) []tools.BaseTool {
+func getTools(ctx context.Context, name string, m config.MCPConfig, permissions permission.Service, c MCPClient, workingDir string) []tools.BaseTool {
var stdioTools []tools.BaseTool
initRequest := mcp.InitializeRequest{}
initRequest.Params.ProtocolVersion = mcp.LATEST_PROTOCOL_VERSION
@@ -164,27 +167,27 @@ func getTools(ctx context.Context, name string, m config.MCP, permissions permis
_, err := c.Initialize(ctx, initRequest)
if err != nil {
- logging.Error("error initializing mcp client", "error", err)
+ slog.Error("error initializing mcp client", "error", err)
return stdioTools
}
toolsRequest := mcp.ListToolsRequest{}
tools, err := c.ListTools(ctx, toolsRequest)
if err != nil {
- logging.Error("error listing tools", "error", err)
+ slog.Error("error listing tools", "error", err)
return stdioTools
}
for _, t := range tools.Tools {
- stdioTools = append(stdioTools, NewMcpTool(name, t, permissions, m))
+ stdioTools = append(stdioTools, NewMcpTool(name, t, permissions, m, workingDir))
}
defer c.Close()
return stdioTools
}
-func GetMcpTools(ctx context.Context, permissions permission.Service) []tools.BaseTool {
+func GetMcpTools(ctx context.Context, permissions permission.Service, cfg *config.Config) []tools.BaseTool {
if len(mcpTools) > 0 {
return mcpTools
}
- for name, m := range config.Get().MCP {
+ for name, m := range cfg.MCP {
switch m.Type {
case config.MCPStdio:
c, err := client.NewStdioMCPClient(
@@ -193,31 +196,31 @@ func GetMcpTools(ctx context.Context, permissions permission.Service) []tools.Ba
m.Args...,
)
if err != nil {
- logging.Error("error creating mcp client", "error", err)
+ slog.Error("error creating mcp client", "error", err)
continue
}
- mcpTools = append(mcpTools, getTools(ctx, name, m, permissions, c)...)
+ mcpTools = append(mcpTools, getTools(ctx, name, m, permissions, c, cfg.WorkingDir())...)
case config.MCPHttp:
c, err := client.NewStreamableHttpClient(
m.URL,
transport.WithHTTPHeaders(m.Headers),
)
if err != nil {
- logging.Error("error creating mcp client", "error", err)
+ slog.Error("error creating mcp client", "error", err)
continue
}
- mcpTools = append(mcpTools, getTools(ctx, name, m, permissions, c)...)
+ mcpTools = append(mcpTools, getTools(ctx, name, m, permissions, c, cfg.WorkingDir())...)
case config.MCPSse:
c, err := client.NewSSEMCPClient(
m.URL,
client.WithHeaders(m.Headers),
)
if err != nil {
- logging.Error("error creating mcp client", "error", err)
+ slog.Error("error creating mcp client", "error", err)
continue
}
- mcpTools = append(mcpTools, getTools(ctx, name, m, permissions, c)...)
+ mcpTools = append(mcpTools, getTools(ctx, name, m, permissions, c, cfg.WorkingDir())...)
}
}
@@ -3,6 +3,7 @@ package prompt
import (
"context"
"fmt"
+ "log/slog"
"os"
"path/filepath"
"runtime"
@@ -11,15 +12,14 @@ import (
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/fur/provider"
"github.com/charmbracelet/crush/internal/llm/tools"
- "github.com/charmbracelet/crush/internal/logging"
)
-func CoderPrompt(p provider.InferenceProvider, contextFiles ...string) string {
+func CoderPrompt(p string, contextFiles ...string) string {
var basePrompt string
switch p {
- case provider.InferenceProviderOpenAI:
+ case string(provider.InferenceProviderOpenAI):
basePrompt = baseOpenAICoderPrompt
- case provider.InferenceProviderGemini, provider.InferenceProviderVertexAI:
+ case string(provider.InferenceProviderGemini), string(provider.InferenceProviderVertexAI):
basePrompt = baseGeminiCoderPrompt
default:
basePrompt = baseAnthropicCoderPrompt
@@ -28,8 +28,8 @@ func CoderPrompt(p provider.InferenceProvider, contextFiles ...string) string {
basePrompt = fmt.Sprintf("%s\n\n%s\n%s", basePrompt, envInfo, lspInformation())
- contextContent := getContextFromPaths(contextFiles)
- logging.Debug("Context content", "Context", contextContent)
+ contextContent := getContextFromPaths(config.Get().WorkingDir(), contextFiles)
+ slog.Debug("Context content", "Context", contextContent)
if contextContent != "" {
return fmt.Sprintf("%s\n\n# Project-Specific Context\n Make sure to follow the instructions in the context below\n%s", basePrompt, contextContent)
}
@@ -380,11 +380,11 @@ Your core function is efficient and safe assistance. Balance extreme conciseness
`
func getEnvironmentInfo() string {
- cwd := config.WorkingDirectory()
+ cwd := config.Get().WorkingDir()
isGit := isGitRepo(cwd)
platform := runtime.GOOS
date := time.Now().Format("1/2/2006")
- ls := tools.NewLsTool()
+ ls := tools.NewLsTool(cwd)
r, _ := ls.Run(context.Background(), tools.ToolCall{
Input: `{"path":"."}`,
})
@@ -5,9 +5,6 @@ import (
"path/filepath"
"strings"
"sync"
-
- "github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/fur/provider"
)
type PromptID string
@@ -20,25 +17,25 @@ const (
PromptDefault PromptID = "default"
)
-func GetPrompt(promptID PromptID, provider provider.InferenceProvider, contextPaths ...string) string {
+func GetPrompt(promptID PromptID, provider string, contextPaths ...string) string {
basePrompt := ""
switch promptID {
case PromptCoder:
basePrompt = CoderPrompt(provider)
case PromptTitle:
- basePrompt = TitlePrompt(provider)
+ basePrompt = TitlePrompt()
case PromptTask:
- basePrompt = TaskPrompt(provider)
+ basePrompt = TaskPrompt()
case PromptSummarizer:
- basePrompt = SummarizerPrompt(provider)
+ basePrompt = SummarizerPrompt()
default:
basePrompt = "You are a helpful assistant"
}
return basePrompt
}
-func getContextFromPaths(contextPaths []string) string {
- return processContextPaths(config.WorkingDirectory(), contextPaths)
+func getContextFromPaths(workingDir string, contextPaths []string) string {
+ return processContextPaths(workingDir, contextPaths)
}
func processContextPaths(workDir string, paths []string) string {
@@ -1,56 +0,0 @@
-package prompt
-
-import (
- "fmt"
- "os"
- "path/filepath"
- "testing"
-
- "github.com/charmbracelet/crush/internal/config"
- "github.com/stretchr/testify/assert"
- "github.com/stretchr/testify/require"
-)
-
-func TestGetContextFromPaths(t *testing.T) {
- t.Parallel()
-
- tmpDir := t.TempDir()
- _, err := config.Init(tmpDir, false)
- if err != nil {
- t.Fatalf("Failed to load config: %v", err)
- }
- testFiles := []string{
- "file.txt",
- "directory/file_a.txt",
- "directory/file_b.txt",
- "directory/file_c.txt",
- }
-
- createTestFiles(t, tmpDir, testFiles)
-
- context := getContextFromPaths(
- []string{
- "file.txt",
- "directory/",
- },
- )
- expectedContext := fmt.Sprintf("# From:%s/file.txt\nfile.txt: test content\n# From:%s/directory/file_a.txt\ndirectory/file_a.txt: test content\n# From:%s/directory/file_b.txt\ndirectory/file_b.txt: test content\n# From:%s/directory/file_c.txt\ndirectory/file_c.txt: test content", tmpDir, tmpDir, tmpDir, tmpDir)
- assert.Equal(t, expectedContext, context)
-}
-
-func createTestFiles(t *testing.T, tmpDir string, testFiles []string) {
- t.Helper()
- for _, path := range testFiles {
- fullPath := filepath.Join(tmpDir, path)
- if path[len(path)-1] == '/' {
- err := os.MkdirAll(fullPath, 0o755)
- require.NoError(t, err)
- } else {
- dir := filepath.Dir(fullPath)
- err := os.MkdirAll(dir, 0o755)
- require.NoError(t, err)
- err = os.WriteFile(fullPath, []byte(path+": test content"), 0o644)
- require.NoError(t, err)
- }
- }
-}
@@ -1,10 +1,6 @@
package prompt
-import (
- "github.com/charmbracelet/crush/internal/fur/provider"
-)
-
-func SummarizerPrompt(_ provider.InferenceProvider) string {
+func SummarizerPrompt() string {
return `You are a helpful AI assistant tasked with summarizing conversations.
When asked to summarize, provide a detailed but concise summary of the conversation.
@@ -2,11 +2,9 @@ package prompt
import (
"fmt"
-
- "github.com/charmbracelet/crush/internal/fur/provider"
)
-func TaskPrompt(_ provider.InferenceProvider) string {
+func TaskPrompt() string {
agentPrompt := `You are an agent for Crush. Given the user's prompt, you should use the tools available to you to answer the user's question.
Notes:
1. IMPORTANT: You should be concise, direct, and to the point, since your responses will be displayed on a command line interface. Answer the user's question directly, without elaboration, explanation, or details. One word answers are best. Avoid introductions, conclusions, and explanations. You MUST avoid text before/after your response, such as "The answer is <answer>.", "Here is the content of the file..." or "Based on the information provided, the answer is..." or "Here is what I will do next...".
@@ -1,10 +1,6 @@
package prompt
-import (
- "github.com/charmbracelet/crush/internal/fur/provider"
-)
-
-func TitlePrompt(_ provider.InferenceProvider) string {
+func TitlePrompt() string {
return `you will generate a short title based on the first message a user begins a conversation with
- ensure it is not more than 50 characters long
- the title should be a summary of the user's message
@@ -6,6 +6,7 @@ import (
"errors"
"fmt"
"io"
+ "log/slog"
"regexp"
"strconv"
"time"
@@ -16,7 +17,6 @@ import (
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/fur/provider"
"github.com/charmbracelet/crush/internal/llm/tools"
- "github.com/charmbracelet/crush/internal/logging"
"github.com/charmbracelet/crush/internal/message"
)
@@ -92,7 +92,7 @@ func (a *anthropicClient) convertMessages(messages []message.Message) (anthropic
}
if len(blocks) == 0 {
- logging.Warn("There is a message without content, investigate, this should not happen")
+ slog.Warn("There is a message without content, investigate, this should not happen")
continue
}
anthropicMessages = append(anthropicMessages, anthropic.NewAssistantMessage(blocks...))
@@ -153,9 +153,9 @@ func (a *anthropicClient) preparedMessages(messages []anthropic.MessageParam, to
model := a.providerOptions.model(a.providerOptions.modelType)
var thinkingParam anthropic.ThinkingConfigParamUnion
cfg := config.Get()
- modelConfig := cfg.Models.Large
- if a.providerOptions.modelType == config.SmallModel {
- modelConfig = cfg.Models.Small
+ modelConfig := cfg.Models[config.SelectedModelTypeLarge]
+ if a.providerOptions.modelType == config.SelectedModelTypeSmall {
+ modelConfig = cfg.Models[config.SelectedModelTypeSmall]
}
temperature := anthropic.Float(0)
@@ -207,7 +207,7 @@ func (a *anthropicClient) send(ctx context.Context, messages []message.Message,
preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
if cfg.Options.Debug {
jsonData, _ := json.Marshal(preparedMessages)
- logging.Debug("Prepared messages", "messages", string(jsonData))
+ slog.Debug("Prepared messages", "messages", string(jsonData))
}
anthropicResponse, err := a.client.Messages.New(
@@ -216,13 +216,13 @@ func (a *anthropicClient) send(ctx context.Context, messages []message.Message,
)
// If there is an error we are going to see if we can retry the call
if err != nil {
- logging.Error("Error in Anthropic API call", "error", err)
+ slog.Error("Error in Anthropic API call", "error", err)
retry, after, retryErr := a.shouldRetry(attempts, err)
if retryErr != nil {
return nil, retryErr
}
if retry {
- logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
+ slog.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
return nil, ctx.Err()
@@ -259,7 +259,7 @@ func (a *anthropicClient) stream(ctx context.Context, messages []message.Message
preparedMessages := a.preparedMessages(a.convertMessages(messages), a.convertTools(tools))
if cfg.Options.Debug {
jsonData, _ := json.Marshal(preparedMessages)
- logging.Debug("Prepared messages", "messages", string(jsonData))
+ slog.Debug("Prepared messages", "messages", string(jsonData))
}
anthropicStream := a.client.Messages.NewStreaming(
@@ -273,7 +273,7 @@ func (a *anthropicClient) stream(ctx context.Context, messages []message.Message
event := anthropicStream.Current()
err := accumulatedMessage.Accumulate(event)
if err != nil {
- logging.Warn("Error accumulating message", "error", err)
+ slog.Warn("Error accumulating message", "error", err)
continue
}
@@ -364,7 +364,7 @@ func (a *anthropicClient) stream(ctx context.Context, messages []message.Message
return
}
if retry {
- logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
+ slog.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
// context cancelled
@@ -399,7 +399,7 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
}
if apiErr.StatusCode == 401 {
- a.providerOptions.apiKey, err = config.ResolveAPIKey(a.providerOptions.config.APIKey)
+ a.providerOptions.apiKey, err = config.Get().Resolve(a.providerOptions.config.APIKey)
if err != nil {
return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
}
@@ -411,7 +411,7 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
if apiErr.StatusCode == 400 {
if adjusted, ok := a.handleContextLimitError(apiErr); ok {
a.adjustedMaxTokens = adjusted
- logging.Debug("Adjusted max_tokens due to context limit", "new_max_tokens", adjusted)
+ slog.Debug("Adjusted max_tokens due to context limit", "new_max_tokens", adjusted)
return true, 0, nil
}
}
@@ -438,7 +438,8 @@ func (a *anthropicClient) shouldRetry(attempts int, err error) (bool, int64, err
func (a *anthropicClient) handleContextLimitError(apiErr *anthropic.Error) (int, bool) {
// Parse error message like: "input length and max_tokens exceed context limit: 154978 + 50000 > 200000"
errorMsg := apiErr.Error()
- re := regexp.MustCompile(`input length and max_tokens exceed context limit: (\d+) \+ (\d+) > (\d+)`)
+
+ re := regexp.MustCompile("input length and `max_tokens` exceed context limit: (\\d+) \\+ (\\d+) > (\\d+)")
matches := re.FindStringSubmatch(errorMsg)
if len(matches) != 4 {
@@ -490,6 +491,6 @@ func (a *anthropicClient) usage(msg anthropic.Message) TokenUsage {
}
}
-func (a *anthropicClient) Model() config.Model {
+func (a *anthropicClient) Model() provider.Model {
return a.providerOptions.model(a.providerOptions.modelType)
}
@@ -7,6 +7,7 @@ import (
"strings"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/fur/provider"
"github.com/charmbracelet/crush/internal/llm/tools"
"github.com/charmbracelet/crush/internal/message"
)
@@ -31,14 +32,14 @@ func newBedrockClient(opts providerClientOptions) BedrockClient {
}
}
- opts.model = func(modelType config.ModelType) config.Model {
- model := config.GetModel(modelType)
+ opts.model = func(modelType config.SelectedModelType) provider.Model {
+ model := config.Get().GetModelByType(modelType)
// Prefix the model name with region
regionPrefix := region[:2]
modelName := model.ID
model.ID = fmt.Sprintf("%s.%s", regionPrefix, modelName)
- return model
+ return *model
}
model := opts.model(opts.modelType)
@@ -87,6 +88,6 @@ func (b *bedrockClient) stream(ctx context.Context, messages []message.Message,
return b.childProvider.stream(ctx, messages, tools)
}
-func (b *bedrockClient) Model() config.Model {
+func (b *bedrockClient) Model() provider.Model {
return b.providerOptions.model(b.providerOptions.modelType)
}
@@ -6,12 +6,13 @@ import (
"errors"
"fmt"
"io"
+ "log/slog"
"strings"
"time"
"github.com/charmbracelet/crush/internal/config"
+ "github.com/charmbracelet/crush/internal/fur/provider"
"github.com/charmbracelet/crush/internal/llm/tools"
- "github.com/charmbracelet/crush/internal/logging"
"github.com/charmbracelet/crush/internal/message"
"github.com/google/uuid"
"google.golang.org/genai"
@@ -27,7 +28,7 @@ type GeminiClient ProviderClient
func newGeminiClient(opts providerClientOptions) GeminiClient {
client, err := createGeminiClient(opts)
if err != nil {
- logging.Error("Failed to create Gemini client", "error", err)
+ slog.Error("Failed to create Gemini client", "error", err)
return nil
}
@@ -167,12 +168,12 @@ func (g *geminiClient) send(ctx context.Context, messages []message.Message, too
cfg := config.Get()
if cfg.Options.Debug {
jsonData, _ := json.Marshal(geminiMessages)
- logging.Debug("Prepared messages", "messages", string(jsonData))
+ slog.Debug("Prepared messages", "messages", string(jsonData))
}
- modelConfig := cfg.Models.Large
- if g.providerOptions.modelType == config.SmallModel {
- modelConfig = cfg.Models.Small
+ modelConfig := cfg.Models[config.SelectedModelTypeLarge]
+ if g.providerOptions.modelType == config.SelectedModelTypeSmall {
+ modelConfig = cfg.Models[config.SelectedModelTypeSmall]
}
maxTokens := model.DefaultMaxTokens
@@ -209,7 +210,7 @@ func (g *geminiClient) send(ctx context.Context, messages []message.Message, too
return nil, retryErr
}
if retry {
- logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
+ slog.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
return nil, ctx.Err()
@@ -265,12 +266,12 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
cfg := config.Get()
if cfg.Options.Debug {
jsonData, _ := json.Marshal(geminiMessages)
- logging.Debug("Prepared messages", "messages", string(jsonData))
+ slog.Debug("Prepared messages", "messages", string(jsonData))
}
- modelConfig := cfg.Models.Large
- if g.providerOptions.modelType == config.SmallModel {
- modelConfig = cfg.Models.Small
+ modelConfig := cfg.Models[config.SelectedModelTypeLarge]
+ if g.providerOptions.modelType == config.SelectedModelTypeSmall {
+ modelConfig = cfg.Models[config.SelectedModelTypeSmall]
}
maxTokens := model.DefaultMaxTokens
if modelConfig.MaxTokens > 0 {
@@ -322,7 +323,7 @@ func (g *geminiClient) stream(ctx context.Context, messages []message.Message, t
return
}
if retry {
- logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
+ slog.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
if ctx.Err() != nil {
@@ -424,7 +425,7 @@ func (g *geminiClient) shouldRetry(attempts int, err error) (bool, int64, error)
// Check for token expiration (401 Unauthorized)
if contains(errMsg, "unauthorized", "invalid api key", "api key expired") {
- g.providerOptions.apiKey, err = config.ResolveAPIKey(g.providerOptions.config.APIKey)
+ g.providerOptions.apiKey, err = config.Get().Resolve(g.providerOptions.config.APIKey)
if err != nil {
return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
}
@@ -462,7 +463,7 @@ func (g *geminiClient) usage(resp *genai.GenerateContentResponse) TokenUsage {
}
}
-func (g *geminiClient) Model() config.Model {
+func (g *geminiClient) Model() provider.Model {
return g.providerOptions.model(g.providerOptions.modelType)
}
@@ -6,12 +6,12 @@ import (
"errors"
"fmt"
"io"
+ "log/slog"
"time"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/fur/provider"
"github.com/charmbracelet/crush/internal/llm/tools"
- "github.com/charmbracelet/crush/internal/logging"
"github.com/charmbracelet/crush/internal/message"
"github.com/openai/openai-go"
"github.com/openai/openai-go/option"
@@ -148,15 +148,12 @@ func (o *openaiClient) preparedParams(messages []openai.ChatCompletionMessagePar
model := o.providerOptions.model(o.providerOptions.modelType)
cfg := config.Get()
- modelConfig := cfg.Models.Large
- if o.providerOptions.modelType == config.SmallModel {
- modelConfig = cfg.Models.Small
+ modelConfig := cfg.Models[config.SelectedModelTypeLarge]
+ if o.providerOptions.modelType == config.SelectedModelTypeSmall {
+ modelConfig = cfg.Models[config.SelectedModelTypeSmall]
}
- reasoningEffort := model.ReasoningEffort
- if modelConfig.ReasoningEffort != "" {
- reasoningEffort = modelConfig.ReasoningEffort
- }
+ reasoningEffort := modelConfig.ReasoningEffort
params := openai.ChatCompletionNewParams{
Model: openai.ChatModel(model.ID),
@@ -197,7 +194,7 @@ func (o *openaiClient) send(ctx context.Context, messages []message.Message, too
cfg := config.Get()
if cfg.Options.Debug {
jsonData, _ := json.Marshal(params)
- logging.Debug("Prepared messages", "messages", string(jsonData))
+ slog.Debug("Prepared messages", "messages", string(jsonData))
}
attempts := 0
for {
@@ -213,7 +210,7 @@ func (o *openaiClient) send(ctx context.Context, messages []message.Message, too
return nil, retryErr
}
if retry {
- logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
+ slog.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
return nil, ctx.Err()
@@ -254,7 +251,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
cfg := config.Get()
if cfg.Options.Debug {
jsonData, _ := json.Marshal(params)
- logging.Debug("Prepared messages", "messages", string(jsonData))
+ slog.Debug("Prepared messages", "messages", string(jsonData))
}
attempts := 0
@@ -291,7 +288,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
if err == nil || errors.Is(err, io.EOF) {
if cfg.Options.Debug {
jsonData, _ := json.Marshal(acc.ChatCompletion)
- logging.Debug("Response", "messages", string(jsonData))
+ slog.Debug("Response", "messages", string(jsonData))
}
resultFinishReason := acc.ChatCompletion.Choices[0].FinishReason
if resultFinishReason == "" {
@@ -329,7 +326,7 @@ func (o *openaiClient) stream(ctx context.Context, messages []message.Message, t
return
}
if retry {
- logging.WarnPersist(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries), logging.PersistTimeArg, time.Millisecond*time.Duration(after+100))
+ slog.Warn(fmt.Sprintf("Retrying due to rate limit... attempt %d of %d", attempts, maxRetries))
select {
case <-ctx.Done():
// context cancelled
@@ -363,7 +360,7 @@ func (o *openaiClient) shouldRetry(attempts int, err error) (bool, int64, error)
// Check for token expiration (401 Unauthorized)
if apiErr.StatusCode == 401 {
- o.providerOptions.apiKey, err = config.ResolveAPIKey(o.providerOptions.config.APIKey)
+ o.providerOptions.apiKey, err = config.Get().Resolve(o.providerOptions.config.APIKey)
if err != nil {
return false, 0, fmt.Errorf("failed to resolve API key: %w", err)
}
@@ -420,6 +417,6 @@ func (o *openaiClient) usage(completion openai.ChatCompletion) TokenUsage {
}
}
-func (a *openaiClient) Model() config.Model {
+func (a *openaiClient) Model() provider.Model {
return a.providerOptions.model(a.providerOptions.modelType)
}
@@ -55,15 +55,15 @@ type Provider interface {
StreamResponse(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent
- Model() config.Model
+ Model() provider.Model
}
type providerClientOptions struct {
baseURL string
config config.ProviderConfig
apiKey string
- modelType config.ModelType
- model func(config.ModelType) config.Model
+ modelType config.SelectedModelType
+ model func(config.SelectedModelType) provider.Model
disableCache bool
systemMessage string
maxTokens int64
@@ -77,7 +77,7 @@ type ProviderClient interface {
send(ctx context.Context, messages []message.Message, tools []tools.BaseTool) (*ProviderResponse, error)
stream(ctx context.Context, messages []message.Message, tools []tools.BaseTool) <-chan ProviderEvent
- Model() config.Model
+ Model() provider.Model
}
type baseProvider[C ProviderClient] struct {
@@ -106,11 +106,11 @@ func (p *baseProvider[C]) StreamResponse(ctx context.Context, messages []message
return p.client.stream(ctx, messages, tools)
}
-func (p *baseProvider[C]) Model() config.Model {
+func (p *baseProvider[C]) Model() provider.Model {
return p.client.Model()
}
-func WithModel(model config.ModelType) ProviderClientOption {
+func WithModel(model config.SelectedModelType) ProviderClientOption {
return func(options *providerClientOptions) {
options.modelType = model
}
@@ -135,7 +135,7 @@ func WithMaxTokens(maxTokens int64) ProviderClientOption {
}
func NewProvider(cfg config.ProviderConfig, opts ...ProviderClientOption) (Provider, error) {
- resolvedAPIKey, err := config.ResolveAPIKey(cfg.APIKey)
+ resolvedAPIKey, err := config.Get().Resolve(cfg.APIKey)
if err != nil {
return nil, fmt.Errorf("failed to resolve API key for provider %s: %w", cfg.ID, err)
}
@@ -145,14 +145,14 @@ func NewProvider(cfg config.ProviderConfig, opts ...ProviderClientOption) (Provi
config: cfg,
apiKey: resolvedAPIKey,
extraHeaders: cfg.ExtraHeaders,
- model: func(tp config.ModelType) config.Model {
- return config.GetModel(tp)
+ model: func(tp config.SelectedModelType) provider.Model {
+ return *config.Get().GetModelByType(tp)
},
}
for _, o := range opts {
o(&clientOptions)
}
- switch cfg.ProviderType {
+ switch cfg.Type {
case provider.TypeAnthropic:
return &baseProvider[AnthropicClient]{
options: clientOptions,
@@ -190,5 +190,5 @@ func NewProvider(cfg config.ProviderConfig, opts ...ProviderClientOption) (Provi
client: newOpenAIClient(clientOptions),
}, nil
}
- return nil, fmt.Errorf("provider not supported: %s", cfg.ProviderType)
+ return nil, fmt.Errorf("provider not supported: %s", cfg.Type)
}
@@ -2,8 +2,8 @@ package provider
import (
"context"
+ "log/slog"
- "github.com/charmbracelet/crush/internal/logging"
"google.golang.org/genai"
)
@@ -18,7 +18,7 @@ func newVertexAIClient(opts providerClientOptions) VertexAIClient {
Backend: genai.BackendVertexAI,
})
if err != nil {
- logging.Error("Failed to create VertexAI client", "error", err)
+ slog.Error("Failed to create VertexAI client", "error", err)
return nil
}
@@ -8,7 +8,6 @@ import (
"strings"
"time"
- "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/shell"
)
@@ -29,6 +28,7 @@ type BashResponseMetadata struct {
}
type bashTool struct {
permissions permission.Service
+ workingDir string
}
const (
@@ -244,9 +244,10 @@ Important:
- Never update git config`, bannedCommandsStr, MaxOutputLength)
}
-func NewBashTool(permission permission.Service) BaseTool {
+func NewBashTool(permission permission.Service, workingDir string) BaseTool {
return &bashTool{
permissions: permission,
+ workingDir: workingDir,
}
}
@@ -317,7 +318,7 @@ func (b *bashTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
p := b.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
- Path: config.WorkingDirectory(),
+ Path: b.workingDir,
ToolName: BashToolName,
Action: "execute",
Description: fmt.Sprintf("Execute command: %s", params.Command),
@@ -337,7 +338,7 @@ func (b *bashTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
defer cancel()
}
stdout, stderr, err := shell.
- GetPersistentShell(config.WorkingDirectory()).
+ GetPersistentShell(b.workingDir).
Exec(ctx, params.Command)
interrupted := shell.IsInterrupt(err)
exitCode := shell.ExitCode(err)
@@ -4,15 +4,15 @@ import (
"context"
"encoding/json"
"fmt"
+ "log/slog"
"os"
"path/filepath"
"strings"
"time"
- "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/history"
- "github.com/charmbracelet/crush/internal/logging"
+
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/permission"
)
@@ -40,6 +40,7 @@ type editTool struct {
lspClients map[string]*lsp.Client
permissions permission.Service
files history.Service
+ workingDir string
}
const (
@@ -98,11 +99,12 @@ WINDOWS NOTES:
Remember: when making multiple file edits in a row to the same file, you should prefer to send all edits in a single message with multiple calls to this tool, rather than multiple messages with a single call each.`
)
-func NewEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service) BaseTool {
+func NewEditTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &editTool{
lspClients: lspClients,
permissions: permissions,
files: files,
+ workingDir: workingDir,
}
}
@@ -143,8 +145,7 @@ func (e *editTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
}
if !filepath.IsAbs(params.FilePath) {
- wd := config.WorkingDirectory()
- params.FilePath = filepath.Join(wd, params.FilePath)
+ params.FilePath = filepath.Join(e.workingDir, params.FilePath)
}
var response ToolResponse
@@ -205,9 +206,9 @@ func (e *editTool) createNewFile(ctx context.Context, filePath, content string)
_, additions, removals := diff.GenerateDiff(
"",
content,
- filePath,
+ strings.TrimPrefix(filePath, e.workingDir),
)
- rootDir := config.WorkingDirectory()
+ rootDir := e.workingDir
permissionPath := filepath.Dir(filePath)
if strings.HasPrefix(filePath, rootDir) {
permissionPath = rootDir
@@ -246,7 +247,7 @@ func (e *editTool) createNewFile(ctx context.Context, filePath, content string)
_, err = e.files.CreateVersion(ctx, sessionID, filePath, content)
if err != nil {
// Log error but don't fail the operation
- logging.Debug("Error creating file history version", "error", err)
+ slog.Debug("Error creating file history version", "error", err)
}
recordFileWrite(filePath)
@@ -317,10 +318,10 @@ func (e *editTool) deleteContent(ctx context.Context, filePath, oldString string
_, additions, removals := diff.GenerateDiff(
oldContent,
newContent,
- filePath,
+ strings.TrimPrefix(filePath, e.workingDir),
)
- rootDir := config.WorkingDirectory()
+ rootDir := e.workingDir
permissionPath := filepath.Dir(filePath)
if strings.HasPrefix(filePath, rootDir) {
permissionPath = rootDir
@@ -361,13 +362,13 @@ func (e *editTool) deleteContent(ctx context.Context, filePath, oldString string
// User Manually changed the content store an intermediate version
_, err = e.files.CreateVersion(ctx, sessionID, filePath, oldContent)
if err != nil {
- logging.Debug("Error creating file history version", "error", err)
+ slog.Debug("Error creating file history version", "error", err)
}
}
// Store the new version
_, err = e.files.CreateVersion(ctx, sessionID, filePath, "")
if err != nil {
- logging.Debug("Error creating file history version", "error", err)
+ slog.Debug("Error creating file history version", "error", err)
}
recordFileWrite(filePath)
@@ -440,9 +441,9 @@ func (e *editTool) replaceContent(ctx context.Context, filePath, oldString, newS
_, additions, removals := diff.GenerateDiff(
oldContent,
newContent,
- filePath,
+ strings.TrimPrefix(filePath, e.workingDir),
)
- rootDir := config.WorkingDirectory()
+ rootDir := e.workingDir
permissionPath := filepath.Dir(filePath)
if strings.HasPrefix(filePath, rootDir) {
permissionPath = rootDir
@@ -483,13 +484,13 @@ func (e *editTool) replaceContent(ctx context.Context, filePath, oldString, newS
// User Manually changed the content store an intermediate version
_, err = e.files.CreateVersion(ctx, sessionID, filePath, oldContent)
if err != nil {
- logging.Debug("Error creating file history version", "error", err)
+ slog.Debug("Error creating file history version", "error", err)
}
}
// Store the new version
_, err = e.files.CreateVersion(ctx, sessionID, filePath, newContent)
if err != nil {
- logging.Debug("Error creating file history version", "error", err)
+ slog.Debug("Error creating file history version", "error", err)
}
recordFileWrite(filePath)
@@ -11,7 +11,6 @@ import (
md "github.com/JohannesKaufmann/html-to-markdown"
"github.com/PuerkitoBio/goquery"
- "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/permission"
)
@@ -30,6 +29,7 @@ type FetchPermissionsParams struct {
type fetchTool struct {
client *http.Client
permissions permission.Service
+ workingDir string
}
const (
@@ -65,7 +65,7 @@ TIPS:
- Set appropriate timeouts for potentially slow websites`
)
-func NewFetchTool(permissions permission.Service) BaseTool {
+func NewFetchTool(permissions permission.Service, workingDir string) BaseTool {
return &fetchTool{
client: &http.Client{
Timeout: 30 * time.Second,
@@ -76,6 +76,7 @@ func NewFetchTool(permissions permission.Service) BaseTool {
},
},
permissions: permissions,
+ workingDir: workingDir,
}
}
@@ -133,7 +134,7 @@ func (t *fetchTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error
p := t.permissions.Request(
permission.CreatePermissionRequest{
SessionID: sessionID,
- Path: config.WorkingDirectory(),
+ Path: t.workingDir,
ToolName: FetchToolName,
Action: "fetch",
Description: fmt.Sprintf("Fetch content from URL: %s", params.URL),
@@ -5,14 +5,13 @@ import (
"context"
"encoding/json"
"fmt"
+ "log/slog"
"os/exec"
"path/filepath"
"sort"
"strings"
- "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/fsext"
- "github.com/charmbracelet/crush/internal/logging"
)
const (
@@ -68,10 +67,14 @@ type GlobResponseMetadata struct {
Truncated bool `json:"truncated"`
}
-type globTool struct{}
+type globTool struct {
+ workingDir string
+}
-func NewGlobTool() BaseTool {
- return &globTool{}
+func NewGlobTool(workingDir string) BaseTool {
+ return &globTool{
+ workingDir: workingDir,
+ }
}
func (g *globTool) Name() string {
@@ -108,7 +111,7 @@ func (g *globTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
searchPath := params.Path
if searchPath == "" {
- searchPath = config.WorkingDirectory()
+ searchPath = g.workingDir
}
files, truncated, err := globFiles(params.Pattern, searchPath, 100)
@@ -143,7 +146,7 @@ func globFiles(pattern, searchPath string, limit int) ([]string, bool, error) {
if err == nil {
return matches, len(matches) >= limit && limit > 0, nil
}
- logging.Warn(fmt.Sprintf("Ripgrep execution failed: %v. Falling back to doublestar.", err))
+ slog.Warn(fmt.Sprintf("Ripgrep execution failed: %v. Falling back to doublestar.", err))
}
return fsext.GlobWithDoubleStar(pattern, searchPath, limit)
@@ -16,7 +16,6 @@ import (
"sync"
"time"
- "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/fsext"
)
@@ -89,7 +88,9 @@ type GrepResponseMetadata struct {
Truncated bool `json:"truncated"`
}
-type grepTool struct{}
+type grepTool struct {
+ workingDir string
+}
const (
GrepToolName = "grep"
@@ -136,8 +137,10 @@ TIPS:
- Use literal_text=true when searching for exact text containing special characters like dots, parentheses, etc.`
)
-func NewGrepTool() BaseTool {
- return &grepTool{}
+func NewGrepTool(workingDir string) BaseTool {
+ return &grepTool{
+ workingDir: workingDir,
+ }
}
func (g *grepTool) Name() string {
@@ -200,7 +203,7 @@ func (g *grepTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
searchPath := params.Path
if searchPath == "" {
- searchPath = config.WorkingDirectory()
+ searchPath = g.workingDir
}
matches, truncated, err := searchFiles(searchPattern, searchPath, params.Include, 100)
@@ -8,7 +8,6 @@ import (
"path/filepath"
"strings"
- "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/fsext"
)
@@ -29,7 +28,9 @@ type LSResponseMetadata struct {
Truncated bool `json:"truncated"`
}
-type lsTool struct{}
+type lsTool struct {
+ workingDir string
+}
const (
LSToolName = "ls"
@@ -70,8 +71,10 @@ TIPS:
- Combine with other tools for more effective exploration`
)
-func NewLsTool() BaseTool {
- return &lsTool{}
+func NewLsTool(workingDir string) BaseTool {
+ return &lsTool{
+ workingDir: workingDir,
+ }
}
func (l *lsTool) Name() string {
@@ -107,11 +110,11 @@ func (l *lsTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error) {
searchPath := params.Path
if searchPath == "" {
- searchPath = config.WorkingDirectory()
+ searchPath = l.workingDir
}
if !filepath.IsAbs(searchPath) {
- searchPath = filepath.Join(config.WorkingDirectory(), searchPath)
+ searchPath = filepath.Join(l.workingDir, searchPath)
}
if _, err := os.Stat(searchPath); os.IsNotExist(err) {
@@ -10,7 +10,6 @@ import (
"path/filepath"
"strings"
- "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/lsp"
)
@@ -22,6 +21,7 @@ type ViewParams struct {
type viewTool struct {
lspClients map[string]*lsp.Client
+ workingDir string
}
type ViewResponseMetadata struct {
@@ -71,9 +71,10 @@ TIPS:
- When viewing large files, use the offset parameter to read specific sections`
)
-func NewViewTool(lspClients map[string]*lsp.Client) BaseTool {
+func NewViewTool(lspClients map[string]*lsp.Client, workingDir string) BaseTool {
return &viewTool{
- lspClients,
+ lspClients: lspClients,
+ workingDir: workingDir,
}
}
@@ -117,7 +118,7 @@ func (v *viewTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error)
// Handle relative paths
filePath := params.FilePath
if !filepath.IsAbs(filePath) {
- filePath = filepath.Join(config.WorkingDirectory(), filePath)
+ filePath = filepath.Join(v.workingDir, filePath)
}
// Check if file exists
@@ -4,15 +4,15 @@ import (
"context"
"encoding/json"
"fmt"
+ "log/slog"
"os"
"path/filepath"
"strings"
"time"
- "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/history"
- "github.com/charmbracelet/crush/internal/logging"
+
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/permission"
)
@@ -32,6 +32,7 @@ type writeTool struct {
lspClients map[string]*lsp.Client
permissions permission.Service
files history.Service
+ workingDir string
}
type WriteResponseMetadata struct {
@@ -76,11 +77,12 @@ TIPS:
- Always include descriptive comments when making changes to existing code`
)
-func NewWriteTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service) BaseTool {
+func NewWriteTool(lspClients map[string]*lsp.Client, permissions permission.Service, files history.Service, workingDir string) BaseTool {
return &writeTool{
lspClients: lspClients,
permissions: permissions,
files: files,
+ workingDir: workingDir,
}
}
@@ -122,7 +124,7 @@ func (w *writeTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error
filePath := params.FilePath
if !filepath.IsAbs(filePath) {
- filePath = filepath.Join(config.WorkingDirectory(), filePath)
+ filePath = filepath.Join(w.workingDir, filePath)
}
fileInfo, err := os.Stat(filePath)
@@ -167,10 +169,10 @@ func (w *writeTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error
diff, additions, removals := diff.GenerateDiff(
oldContent,
params.Content,
- filePath,
+ strings.TrimPrefix(filePath, w.workingDir),
)
- rootDir := config.WorkingDirectory()
+ rootDir := w.workingDir
permissionPath := filepath.Dir(filePath)
if strings.HasPrefix(filePath, rootDir) {
permissionPath = rootDir
@@ -211,13 +213,13 @@ func (w *writeTool) Run(ctx context.Context, call ToolCall) (ToolResponse, error
// User Manually changed the content store an intermediate version
_, err = w.files.CreateVersion(ctx, sessionID, filePath, oldContent)
if err != nil {
- logging.Debug("Error creating file history version", "error", err)
+ slog.Debug("Error creating file history version", "error", err)
}
}
// Store the new version
_, err = w.files.CreateVersion(ctx, sessionID, filePath, params.Content)
if err != nil {
- logging.Debug("Error creating file history version", "error", err)
+ slog.Debug("Error creating file history version", "error", err)
}
recordFileWrite(filePath)
@@ -0,0 +1,61 @@
+package log
+
+import (
+ "fmt"
+ "log/slog"
+ "os"
+ "runtime/debug"
+ "sync"
+ "time"
+
+ "gopkg.in/natefinch/lumberjack.v2"
+)
+
+var initOnce sync.Once
+
+// Init configures the process-wide default slog logger exactly once.
+// Log output is JSON, written to logFile with size-based rotation via
+// lumberjack. When debugMode is true the minimum level is lowered to
+// Debug. Calls after the first are no-ops (sync.Once).
+func Init(logFile string, debugMode bool) {
+	initOnce.Do(func() {
+		// Rotating file writer so the log cannot grow without bound.
+		logRotator := &lumberjack.Logger{
+			Filename:   logFile,
+			MaxSize:    10,    // megabytes before rotation
+			MaxBackups: 0,     // keep no rotated backup files
+			MaxAge:     30,    // days to retain rotated files
+			Compress:   false, // rotated files are left uncompressed
+		}
+
+		level := slog.LevelInfo
+		if debugMode {
+			level = slog.LevelDebug
+		}
+
+		handler := slog.NewJSONHandler(logRotator, &slog.HandlerOptions{
+			Level:     level,
+			AddSource: true, // include file:line of the call site
+		})
+
+		slog.SetDefault(slog.New(handler))
+	})
+}
+
+// RecoverPanic recovers from a panic in the calling goroutine, logs it,
+// dumps the panic value and stack trace to a timestamped file in the
+// current directory, and then runs the optional cleanup function. It
+// must be invoked via defer. Unlike a naive implementation, cleanup is
+// executed even when the dump file cannot be created.
+func RecoverPanic(name string, cleanup func()) {
+	r := recover()
+	if r == nil {
+		return
+	}
+
+	slog.Error("Panic recovered", "name", name, "panic", r)
+
+	// Best-effort: write the panic details to a timestamped log file.
+	timestamp := time.Now().Format("20060102-150405")
+	filename := fmt.Sprintf("crush-panic-%s-%s.log", name, timestamp)
+
+	if file, err := os.Create(filename); err == nil {
+		defer file.Close()
+
+		// Write panic information and stack trace.
+		fmt.Fprintf(file, "Panic in %s: %v\n\n", name, r)
+		fmt.Fprintf(file, "Time: %s\n\n", time.Now().Format(time.RFC3339))
+		fmt.Fprintf(file, "Stack Trace:\n%s\n", debug.Stack())
+	} else {
+		slog.Error("Failed to create panic log file", "file", filename, "error", err)
+	}
+
+	// Run cleanup unconditionally so resources are released even when
+	// the panic log could not be written.
+	if cleanup != nil {
+		cleanup()
+	}
+}
@@ -1,209 +0,0 @@
-package logging
-
-import (
- "fmt"
- "log/slog"
- "os"
-
- // "path/filepath"
- "encoding/json"
- "runtime"
- "runtime/debug"
- "sync"
- "time"
-)
-
-func getCaller() string {
- var caller string
- if _, file, line, ok := runtime.Caller(2); ok {
- // caller = fmt.Sprintf("%s:%d", filepath.Base(file), line)
- caller = fmt.Sprintf("%s:%d", file, line)
- } else {
- caller = "unknown"
- }
- return caller
-}
-
-func Info(msg string, args ...any) {
- source := getCaller()
- slog.Info(msg, append([]any{"source", source}, args...)...)
-}
-
-func Debug(msg string, args ...any) {
- // slog.Debug(msg, args...)
- source := getCaller()
- slog.Debug(msg, append([]any{"source", source}, args...)...)
-}
-
-func Warn(msg string, args ...any) {
- slog.Warn(msg, args...)
-}
-
-func Error(msg string, args ...any) {
- slog.Error(msg, args...)
-}
-
-func InfoPersist(msg string, args ...any) {
- args = append(args, persistKeyArg, true)
- slog.Info(msg, args...)
-}
-
-func DebugPersist(msg string, args ...any) {
- args = append(args, persistKeyArg, true)
- slog.Debug(msg, args...)
-}
-
-func WarnPersist(msg string, args ...any) {
- args = append(args, persistKeyArg, true)
- slog.Warn(msg, args...)
-}
-
-func ErrorPersist(msg string, args ...any) {
- args = append(args, persistKeyArg, true)
- slog.Error(msg, args...)
-}
-
-// RecoverPanic is a common function to handle panics gracefully.
-// It logs the error, creates a panic log file with stack trace,
-// and executes an optional cleanup function before returning.
-func RecoverPanic(name string, cleanup func()) {
- if r := recover(); r != nil {
- // Log the panic
- ErrorPersist(fmt.Sprintf("Panic in %s: %v", name, r))
-
- // Create a timestamped panic log file
- timestamp := time.Now().Format("20060102-150405")
- filename := fmt.Sprintf("crush-panic-%s-%s.log", name, timestamp)
-
- file, err := os.Create(filename)
- if err != nil {
- ErrorPersist(fmt.Sprintf("Failed to create panic log: %v", err))
- } else {
- defer file.Close()
-
- // Write panic information and stack trace
- fmt.Fprintf(file, "Panic in %s: %v\n\n", name, r)
- fmt.Fprintf(file, "Time: %s\n\n", time.Now().Format(time.RFC3339))
- fmt.Fprintf(file, "Stack Trace:\n%s\n", debug.Stack())
-
- InfoPersist(fmt.Sprintf("Panic details written to %s", filename))
- }
-
- // Execute cleanup function if provided
- if cleanup != nil {
- cleanup()
- }
- }
-}
-
-// Message Logging for Debug
-var MessageDir string
-
-func GetSessionPrefix(sessionId string) string {
- return sessionId[:8]
-}
-
-var sessionLogMutex sync.Mutex
-
-func AppendToSessionLogFile(sessionId string, filename string, content string) string {
- if MessageDir == "" || sessionId == "" {
- return ""
- }
- sessionPrefix := GetSessionPrefix(sessionId)
-
- sessionLogMutex.Lock()
- defer sessionLogMutex.Unlock()
-
- sessionPath := fmt.Sprintf("%s/%s", MessageDir, sessionPrefix)
- if _, err := os.Stat(sessionPath); os.IsNotExist(err) {
- if err := os.MkdirAll(sessionPath, 0o766); err != nil {
- Error("Failed to create session directory", "dirpath", sessionPath, "error", err)
- return ""
- }
- }
-
- filePath := fmt.Sprintf("%s/%s", sessionPath, filename)
-
- f, err := os.OpenFile(filePath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o644)
- if err != nil {
- Error("Failed to open session log file", "filepath", filePath, "error", err)
- return ""
- }
- defer f.Close()
-
- // Append chunk to file
- _, err = f.WriteString(content)
- if err != nil {
- Error("Failed to write chunk to session log file", "filepath", filePath, "error", err)
- return ""
- }
- return filePath
-}
-
-func WriteRequestMessageJson(sessionId string, requestSeqId int, message any) string {
- if MessageDir == "" || sessionId == "" || requestSeqId <= 0 {
- return ""
- }
- msgJson, err := json.Marshal(message)
- if err != nil {
- Error("Failed to marshal message", "session_id", sessionId, "request_seq_id", requestSeqId, "error", err)
- return ""
- }
- return WriteRequestMessage(sessionId, requestSeqId, string(msgJson))
-}
-
-func WriteRequestMessage(sessionId string, requestSeqId int, message string) string {
- if MessageDir == "" || sessionId == "" || requestSeqId <= 0 {
- return ""
- }
- filename := fmt.Sprintf("%d_request.json", requestSeqId)
-
- return AppendToSessionLogFile(sessionId, filename, message)
-}
-
-func AppendToStreamSessionLogJson(sessionId string, requestSeqId int, jsonableChunk any) string {
- if MessageDir == "" || sessionId == "" || requestSeqId <= 0 {
- return ""
- }
- chunkJson, err := json.Marshal(jsonableChunk)
- if err != nil {
- Error("Failed to marshal message", "session_id", sessionId, "request_seq_id", requestSeqId, "error", err)
- return ""
- }
- return AppendToStreamSessionLog(sessionId, requestSeqId, string(chunkJson))
-}
-
-func AppendToStreamSessionLog(sessionId string, requestSeqId int, chunk string) string {
- if MessageDir == "" || sessionId == "" || requestSeqId <= 0 {
- return ""
- }
- filename := fmt.Sprintf("%d_response_stream.log", requestSeqId)
- return AppendToSessionLogFile(sessionId, filename, chunk)
-}
-
-func WriteChatResponseJson(sessionId string, requestSeqId int, response any) string {
- if MessageDir == "" || sessionId == "" || requestSeqId <= 0 {
- return ""
- }
- responseJson, err := json.Marshal(response)
- if err != nil {
- Error("Failed to marshal response", "session_id", sessionId, "request_seq_id", requestSeqId, "error", err)
- return ""
- }
- filename := fmt.Sprintf("%d_response.json", requestSeqId)
-
- return AppendToSessionLogFile(sessionId, filename, string(responseJson))
-}
-
-func WriteToolResultsJson(sessionId string, requestSeqId int, toolResults any) string {
- if MessageDir == "" || sessionId == "" || requestSeqId <= 0 {
- return ""
- }
- toolResultsJson, err := json.Marshal(toolResults)
- if err != nil {
- Error("Failed to marshal tool results", "session_id", sessionId, "request_seq_id", requestSeqId, "error", err)
- return ""
- }
- filename := fmt.Sprintf("%d_tool_results.json", requestSeqId)
- return AppendToSessionLogFile(sessionId, filename, string(toolResultsJson))
-}
@@ -1,21 +0,0 @@
-package logging
-
-import (
- "time"
-)
-
-// LogMessage is the event payload for a log message
-type LogMessage struct {
- ID string
- Time time.Time
- Level string
- Persist bool // used when we want to show the mesage in the status bar
- PersistTime time.Duration // used when we want to show the mesage in the status bar
- Message string `json:"msg"`
- Attributes []Attr
-}
-
-type Attr struct {
- Key string
- Value string
-}
@@ -1,102 +0,0 @@
-package logging
-
-import (
- "bytes"
- "context"
- "fmt"
- "strings"
- "sync"
- "time"
-
- "github.com/charmbracelet/crush/internal/pubsub"
- "github.com/go-logfmt/logfmt"
-)
-
-const (
- persistKeyArg = "$_persist"
- PersistTimeArg = "$_persist_time"
-)
-
-type LogData struct {
- messages []LogMessage
- *pubsub.Broker[LogMessage]
- lock sync.Mutex
-}
-
-func (l *LogData) Add(msg LogMessage) {
- l.lock.Lock()
- defer l.lock.Unlock()
- l.messages = append(l.messages, msg)
- l.Publish(pubsub.CreatedEvent, msg)
-}
-
-func (l *LogData) List() []LogMessage {
- l.lock.Lock()
- defer l.lock.Unlock()
- return l.messages
-}
-
-var defaultLogData = &LogData{
- messages: make([]LogMessage, 0),
- Broker: pubsub.NewBroker[LogMessage](),
-}
-
-type writer struct{}
-
-func (w *writer) Write(p []byte) (int, error) {
- d := logfmt.NewDecoder(bytes.NewReader(p))
-
- for d.ScanRecord() {
- msg := LogMessage{
- ID: fmt.Sprintf("%d", time.Now().UnixNano()),
- Time: time.Now(),
- }
- for d.ScanKeyval() {
- switch string(d.Key()) {
- case "time":
- parsed, err := time.Parse(time.RFC3339, string(d.Value()))
- if err != nil {
- return 0, fmt.Errorf("parsing time: %w", err)
- }
- msg.Time = parsed
- case "level":
- msg.Level = strings.ToLower(string(d.Value()))
- case "msg":
- msg.Message = string(d.Value())
- default:
- if string(d.Key()) == persistKeyArg {
- msg.Persist = true
- } else if string(d.Key()) == PersistTimeArg {
- parsed, err := time.ParseDuration(string(d.Value()))
- if err != nil {
- continue
- }
- msg.PersistTime = parsed
- } else {
- msg.Attributes = append(msg.Attributes, Attr{
- Key: string(d.Key()),
- Value: string(d.Value()),
- })
- }
- }
- }
- defaultLogData.Add(msg)
- }
- if d.Err() != nil {
- return 0, d.Err()
- }
- return len(p), nil
-}
-
-func NewWriter() *writer {
- w := &writer{}
- return w
-}
-
-func Subscribe(ctx context.Context) <-chan pubsub.Event[LogMessage] {
- return defaultLogData.Subscribe(ctx)
-}
-
-func List() []LogMessage {
- return defaultLogData.List()
-}
@@ -6,6 +6,7 @@ import (
"encoding/json"
"fmt"
"io"
+ "log/slog"
"os"
"os/exec"
"path/filepath"
@@ -15,7 +16,7 @@ import (
"time"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/logging"
+ "github.com/charmbracelet/crush/internal/log"
"github.com/charmbracelet/crush/internal/lsp/protocol"
)
@@ -96,17 +97,17 @@ func NewClient(ctx context.Context, command string, args ...string) (*Client, er
go func() {
scanner := bufio.NewScanner(stderr)
for scanner.Scan() {
- logging.Error("LSP Server", "err", scanner.Text())
+ slog.Error("LSP Server", "err", scanner.Text())
}
if err := scanner.Err(); err != nil {
- logging.Error("Error reading", "err", err)
+ slog.Error("Error reading", "err", err)
}
}()
// Start message handling loop
go func() {
- defer logging.RecoverPanic("LSP-message-handler", func() {
- logging.ErrorPersist("LSP message handler crashed, LSP functionality may be impaired")
+ defer log.RecoverPanic("LSP-message-handler", func() {
+ slog.Error("LSP message handler crashed, LSP functionality may be impaired")
})
client.handleMessages()
}()
@@ -300,7 +301,7 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
defer ticker.Stop()
if cfg.Options.DebugLSP {
- logging.Debug("Waiting for LSP server to be ready...")
+ slog.Debug("Waiting for LSP server to be ready...")
}
// Determine server type for specialized initialization
@@ -309,7 +310,7 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
// For TypeScript-like servers, we need to open some key files first
if serverType == ServerTypeTypeScript {
if cfg.Options.DebugLSP {
- logging.Debug("TypeScript-like server detected, opening key configuration files")
+ slog.Debug("TypeScript-like server detected, opening key configuration files")
}
c.openKeyConfigFiles(ctx)
}
@@ -326,15 +327,15 @@ func (c *Client) WaitForServerReady(ctx context.Context) error {
// Server responded successfully
c.SetServerState(StateReady)
if cfg.Options.DebugLSP {
- logging.Debug("LSP server is ready")
+ slog.Debug("LSP server is ready")
}
return nil
} else {
- logging.Debug("LSP server not ready yet", "error", err, "serverType", serverType)
+ slog.Debug("LSP server not ready yet", "error", err, "serverType", serverType)
}
if cfg.Options.DebugLSP {
- logging.Debug("LSP server not ready yet", "error", err, "serverType", serverType)
+ slog.Debug("LSP server not ready yet", "error", err, "serverType", serverType)
}
}
}
@@ -376,7 +377,7 @@ func (c *Client) detectServerType() ServerType {
// openKeyConfigFiles opens important configuration files that help initialize the server
func (c *Client) openKeyConfigFiles(ctx context.Context) {
- workDir := config.WorkingDirectory()
+ workDir := config.Get().WorkingDir()
serverType := c.detectServerType()
var filesToOpen []string
@@ -409,9 +410,9 @@ func (c *Client) openKeyConfigFiles(ctx context.Context) {
if _, err := os.Stat(file); err == nil {
// File exists, try to open it
if err := c.OpenFile(ctx, file); err != nil {
- logging.Debug("Failed to open key config file", "file", file, "error", err)
+ slog.Debug("Failed to open key config file", "file", file, "error", err)
} else {
- logging.Debug("Opened key config file for initialization", "file", file)
+ slog.Debug("Opened key config file for initialization", "file", file)
}
}
}
@@ -464,7 +465,7 @@ func (c *Client) pingTypeScriptServer(ctx context.Context) error {
}
// If we have no open TypeScript files, try to find and open one
- workDir := config.WorkingDirectory()
+ workDir := config.Get().WorkingDir()
err := filepath.WalkDir(workDir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
@@ -487,7 +488,7 @@ func (c *Client) pingTypeScriptServer(ctx context.Context) error {
return nil
})
if err != nil {
- logging.Debug("Error walking directory for TypeScript files", "error", err)
+ slog.Debug("Error walking directory for TypeScript files", "error", err)
}
// Final fallback - just try a generic capability
@@ -527,7 +528,7 @@ func (c *Client) openTypeScriptFiles(ctx context.Context, workDir string) {
if err := c.OpenFile(ctx, path); err == nil {
filesOpened++
if cfg.Options.DebugLSP {
- logging.Debug("Opened TypeScript file for initialization", "file", path)
+ slog.Debug("Opened TypeScript file for initialization", "file", path)
}
}
}
@@ -536,11 +537,11 @@ func (c *Client) openTypeScriptFiles(ctx context.Context, workDir string) {
})
if err != nil && cfg.Options.DebugLSP {
- logging.Debug("Error walking directory for TypeScript files", "error", err)
+ slog.Debug("Error walking directory for TypeScript files", "error", err)
}
if cfg.Options.DebugLSP {
- logging.Debug("Opened TypeScript files for initialization", "count", filesOpened)
+ slog.Debug("Opened TypeScript files for initialization", "count", filesOpened)
}
}
@@ -681,7 +682,7 @@ func (c *Client) CloseFile(ctx context.Context, filepath string) error {
}
if cfg.Options.DebugLSP {
- logging.Debug("Closing file", "file", filepath)
+ slog.Debug("Closing file", "file", filepath)
}
if err := c.Notify(ctx, "textDocument/didClose", params); err != nil {
return err
@@ -720,12 +721,12 @@ func (c *Client) CloseAllFiles(ctx context.Context) {
for _, filePath := range filesToClose {
err := c.CloseFile(ctx, filePath)
if err != nil && cfg.Options.DebugLSP {
- logging.Warn("Error closing file", "file", filePath, "error", err)
+ slog.Warn("Error closing file", "file", filePath, "error", err)
}
}
if cfg.Options.DebugLSP {
- logging.Debug("Closed all files", "files", filesToClose)
+ slog.Debug("Closed all files", "files", filesToClose)
}
}
@@ -2,9 +2,10 @@ package lsp
import (
"encoding/json"
+ "log/slog"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/logging"
+
"github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/lsp/util"
)
@@ -18,7 +19,7 @@ func HandleWorkspaceConfiguration(params json.RawMessage) (any, error) {
func HandleRegisterCapability(params json.RawMessage) (any, error) {
var registerParams protocol.RegistrationParams
if err := json.Unmarshal(params, ®isterParams); err != nil {
- logging.Error("Error unmarshaling registration params", "error", err)
+ slog.Error("Error unmarshaling registration params", "error", err)
return nil, err
}
@@ -28,13 +29,13 @@ func HandleRegisterCapability(params json.RawMessage) (any, error) {
// Parse the registration options
optionsJSON, err := json.Marshal(reg.RegisterOptions)
if err != nil {
- logging.Error("Error marshaling registration options", "error", err)
+ slog.Error("Error marshaling registration options", "error", err)
continue
}
var options protocol.DidChangeWatchedFilesRegistrationOptions
if err := json.Unmarshal(optionsJSON, &options); err != nil {
- logging.Error("Error unmarshaling registration options", "error", err)
+ slog.Error("Error unmarshaling registration options", "error", err)
continue
}
@@ -54,7 +55,7 @@ func HandleApplyEdit(params json.RawMessage) (any, error) {
err := util.ApplyWorkspaceEdit(edit.Edit)
if err != nil {
- logging.Error("Error applying workspace edit", "error", err)
+ slog.Error("Error applying workspace edit", "error", err)
return protocol.ApplyWorkspaceEditResult{Applied: false, FailureReason: err.Error()}, nil
}
@@ -89,7 +90,7 @@ func HandleServerMessage(params json.RawMessage) {
}
if err := json.Unmarshal(params, &msg); err == nil {
if cfg.Options.DebugLSP {
- logging.Debug("Server message", "type", msg.Type, "message", msg.Message)
+ slog.Debug("Server message", "type", msg.Type, "message", msg.Message)
}
}
}
@@ -97,7 +98,7 @@ func HandleServerMessage(params json.RawMessage) {
func HandleDiagnostics(client *Client, params json.RawMessage) {
var diagParams protocol.PublishDiagnosticsParams
if err := json.Unmarshal(params, &diagParams); err != nil {
- logging.Error("Error unmarshaling diagnostics params", "error", err)
+ slog.Error("Error unmarshaling diagnostics params", "error", err)
return
}
@@ -55,7 +55,7 @@ type ApplyWorkspaceEditResult struct {
// Indicates whether the edit was applied or not.
Applied bool `json:"applied"`
// An optional textual description for why the edit was not applied.
- // This may be used by the server for diagnostic logging or to provide
+	// This may be used by the server for diagnostic logging or to provide
// a suitable error for a request that triggered the edit.
FailureReason string `json:"failureReason,omitempty"`
// Depending on the client's failure handling strategy `failedChange` might
@@ -6,10 +6,10 @@ import (
"encoding/json"
"fmt"
"io"
+ "log/slog"
"strings"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/logging"
)
// Write writes an LSP message to the given writer
@@ -21,7 +21,7 @@ func WriteMessage(w io.Writer, msg *Message) error {
cfg := config.Get()
if cfg.Options.DebugLSP {
- logging.Debug("Sending message to server", "method", msg.Method, "id", msg.ID)
+ slog.Debug("Sending message to server", "method", msg.Method, "id", msg.ID)
}
_, err = fmt.Fprintf(w, "Content-Length: %d\r\n\r\n", len(data))
@@ -50,7 +50,7 @@ func ReadMessage(r *bufio.Reader) (*Message, error) {
line = strings.TrimSpace(line)
if cfg.Options.DebugLSP {
- logging.Debug("Received header", "line", line)
+ slog.Debug("Received header", "line", line)
}
if line == "" {
@@ -66,7 +66,7 @@ func ReadMessage(r *bufio.Reader) (*Message, error) {
}
if cfg.Options.DebugLSP {
- logging.Debug("Content-Length", "length", contentLength)
+ slog.Debug("Content-Length", "length", contentLength)
}
// Read content
@@ -77,7 +77,7 @@ func ReadMessage(r *bufio.Reader) (*Message, error) {
}
if cfg.Options.DebugLSP {
- logging.Debug("Received content", "content", string(content))
+ slog.Debug("Received content", "content", string(content))
}
// Parse message
@@ -96,7 +96,7 @@ func (c *Client) handleMessages() {
msg, err := ReadMessage(c.stdout)
if err != nil {
if cfg.Options.DebugLSP {
- logging.Error("Error reading message", "error", err)
+ slog.Error("Error reading message", "error", err)
}
return
}
@@ -104,7 +104,7 @@ func (c *Client) handleMessages() {
// Handle server->client request (has both Method and ID)
if msg.Method != "" && msg.ID != 0 {
if cfg.Options.DebugLSP {
- logging.Debug("Received request from server", "method", msg.Method, "id", msg.ID)
+ slog.Debug("Received request from server", "method", msg.Method, "id", msg.ID)
}
response := &Message{
@@ -144,7 +144,7 @@ func (c *Client) handleMessages() {
// Send response back to server
if err := WriteMessage(c.stdin, response); err != nil {
- logging.Error("Error sending response to server", "error", err)
+ slog.Error("Error sending response to server", "error", err)
}
continue
@@ -158,11 +158,11 @@ func (c *Client) handleMessages() {
if ok {
if cfg.Options.DebugLSP {
- logging.Debug("Handling notification", "method", msg.Method)
+ slog.Debug("Handling notification", "method", msg.Method)
}
go handler(msg.Params)
} else if cfg.Options.DebugLSP {
- logging.Debug("No handler for notification", "method", msg.Method)
+ slog.Debug("No handler for notification", "method", msg.Method)
}
continue
}
@@ -175,12 +175,12 @@ func (c *Client) handleMessages() {
if ok {
if cfg.Options.DebugLSP {
- logging.Debug("Received response for request", "id", msg.ID)
+ slog.Debug("Received response for request", "id", msg.ID)
}
ch <- msg
close(ch)
} else if cfg.Options.DebugLSP {
- logging.Debug("No handler for response", "id", msg.ID)
+ slog.Debug("No handler for response", "id", msg.ID)
}
}
}
@@ -192,7 +192,7 @@ func (c *Client) Call(ctx context.Context, method string, params any, result any
id := c.nextID.Add(1)
if cfg.Options.DebugLSP {
- logging.Debug("Making call", "method", method, "id", id)
+ slog.Debug("Making call", "method", method, "id", id)
}
msg, err := NewRequest(id, method, params)
@@ -218,14 +218,14 @@ func (c *Client) Call(ctx context.Context, method string, params any, result any
}
if cfg.Options.DebugLSP {
- logging.Debug("Request sent", "method", method, "id", id)
+ slog.Debug("Request sent", "method", method, "id", id)
}
// Wait for response
resp := <-ch
if cfg.Options.DebugLSP {
- logging.Debug("Received response", "id", id)
+ slog.Debug("Received response", "id", id)
}
if resp.Error != nil {
@@ -251,7 +251,7 @@ func (c *Client) Call(ctx context.Context, method string, params any, result any
func (c *Client) Notify(ctx context.Context, method string, params any) error {
cfg := config.Get()
if cfg.Options.DebugLSP {
- logging.Debug("Sending notification", "method", method)
+ slog.Debug("Sending notification", "method", method)
}
msg, err := NewNotification(method, params)
@@ -3,6 +3,7 @@ package watcher
import (
"context"
"fmt"
+ "log/slog"
"os"
"path/filepath"
"strings"
@@ -11,7 +12,7 @@ import (
"github.com/bmatcuk/doublestar/v4"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/logging"
+
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/fsnotify/fsnotify"
@@ -45,7 +46,7 @@ func NewWorkspaceWatcher(client *lsp.Client) *WorkspaceWatcher {
func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watchers []protocol.FileSystemWatcher) {
cfg := config.Get()
- logging.Debug("Adding file watcher registrations")
+ slog.Debug("Adding file watcher registrations")
w.registrationMu.Lock()
defer w.registrationMu.Unlock()
@@ -54,33 +55,33 @@ func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watc
// Print detailed registration information for debugging
if cfg.Options.DebugLSP {
- logging.Debug("Adding file watcher registrations",
+ slog.Debug("Adding file watcher registrations",
"id", id,
"watchers", len(watchers),
"total", len(w.registrations),
)
for i, watcher := range watchers {
- logging.Debug("Registration", "index", i+1)
+ slog.Debug("Registration", "index", i+1)
// Log the GlobPattern
switch v := watcher.GlobPattern.Value.(type) {
case string:
- logging.Debug("GlobPattern", "pattern", v)
+ slog.Debug("GlobPattern", "pattern", v)
case protocol.RelativePattern:
- logging.Debug("GlobPattern", "pattern", v.Pattern)
+ slog.Debug("GlobPattern", "pattern", v.Pattern)
// Log BaseURI details
switch u := v.BaseURI.Value.(type) {
case string:
- logging.Debug("BaseURI", "baseURI", u)
+ slog.Debug("BaseURI", "baseURI", u)
case protocol.DocumentUri:
- logging.Debug("BaseURI", "baseURI", u)
+ slog.Debug("BaseURI", "baseURI", u)
default:
- logging.Debug("BaseURI", "baseURI", u)
+ slog.Debug("BaseURI", "baseURI", u)
}
default:
- logging.Debug("GlobPattern", "unknown type", fmt.Sprintf("%T", v))
+ slog.Debug("GlobPattern", "unknown type", fmt.Sprintf("%T", v))
}
// Log WatchKind
@@ -89,13 +90,13 @@ func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watc
watchKind = *watcher.Kind
}
- logging.Debug("WatchKind", "kind", watchKind)
+ slog.Debug("WatchKind", "kind", watchKind)
}
}
// Determine server type for specialized handling
serverName := getServerNameFromContext(ctx)
- logging.Debug("Server type detected", "serverName", serverName)
+ slog.Debug("Server type detected", "serverName", serverName)
// Check if this server has sent file watchers
hasFileWatchers := len(watchers) > 0
@@ -123,7 +124,7 @@ func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watc
filesOpened += highPriorityFilesOpened
if cfg.Options.DebugLSP {
- logging.Debug("Opened high-priority files",
+ slog.Debug("Opened high-priority files",
"count", highPriorityFilesOpened,
"serverName", serverName)
}
@@ -131,7 +132,7 @@ func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watc
// If we've already opened enough high-priority files, we might not need more
if filesOpened >= maxFilesToOpen {
if cfg.Options.DebugLSP {
- logging.Debug("Reached file limit with high-priority files",
+ slog.Debug("Reached file limit with high-priority files",
"filesOpened", filesOpened,
"maxFiles", maxFilesToOpen)
}
@@ -149,7 +150,7 @@ func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watc
if d.IsDir() {
if path != w.workspacePath && shouldExcludeDir(path) {
if cfg.Options.DebugLSP {
- logging.Debug("Skipping excluded directory", "path", path)
+ slog.Debug("Skipping excluded directory", "path", path)
}
return filepath.SkipDir
}
@@ -177,7 +178,7 @@ func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watc
elapsedTime := time.Since(startTime)
if cfg.Options.DebugLSP {
- logging.Debug("Limited workspace scan complete",
+ slog.Debug("Limited workspace scan complete",
"filesOpened", filesOpened,
"maxFiles", maxFilesToOpen,
"elapsedTime", elapsedTime.Seconds(),
@@ -186,11 +187,11 @@ func (w *WorkspaceWatcher) AddRegistrations(ctx context.Context, id string, watc
}
if err != nil && cfg.Options.DebugLSP {
- logging.Debug("Error scanning workspace for files to open", "error", err)
+ slog.Debug("Error scanning workspace for files to open", "error", err)
}
}()
} else if cfg.Options.DebugLSP {
- logging.Debug("Using on-demand file loading for server", "server", serverName)
+ slog.Debug("Using on-demand file loading for server", "server", serverName)
}
}
@@ -266,7 +267,7 @@ func (w *WorkspaceWatcher) openHighPriorityFiles(ctx context.Context, serverName
matches, err := doublestar.Glob(os.DirFS(w.workspacePath), pattern)
if err != nil {
if cfg.Options.DebugLSP {
- logging.Debug("Error finding high-priority files", "pattern", pattern, "error", err)
+ slog.Debug("Error finding high-priority files", "pattern", pattern, "error", err)
}
continue
}
@@ -300,12 +301,12 @@ func (w *WorkspaceWatcher) openHighPriorityFiles(ctx context.Context, serverName
fullPath := filesToOpen[j]
if err := w.client.OpenFile(ctx, fullPath); err != nil {
if cfg.Options.DebugLSP {
- logging.Debug("Error opening high-priority file", "path", fullPath, "error", err)
+ slog.Debug("Error opening high-priority file", "path", fullPath, "error", err)
}
} else {
filesOpened++
if cfg.Options.DebugLSP {
- logging.Debug("Opened high-priority file", "path", fullPath)
+ slog.Debug("Opened high-priority file", "path", fullPath)
}
}
}
@@ -334,7 +335,7 @@ func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath str
}
serverName := getServerNameFromContext(ctx)
- logging.Debug("Starting workspace watcher", "workspacePath", workspacePath, "serverName", serverName)
+ slog.Debug("Starting workspace watcher", "workspacePath", workspacePath, "serverName", serverName)
// Register handler for file watcher registrations from the server
lsp.RegisterFileWatchHandler(func(id string, watchers []protocol.FileSystemWatcher) {
@@ -343,7 +344,7 @@ func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath str
watcher, err := fsnotify.NewWatcher()
if err != nil {
- logging.Error("Error creating watcher", "error", err)
+ slog.Error("Error creating watcher", "error", err)
}
defer watcher.Close()
@@ -357,7 +358,7 @@ func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath str
if d.IsDir() && path != workspacePath {
if shouldExcludeDir(path) {
if cfg.Options.DebugLSP {
- logging.Debug("Skipping excluded directory", "path", path)
+ slog.Debug("Skipping excluded directory", "path", path)
}
return filepath.SkipDir
}
@@ -367,14 +368,14 @@ func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath str
if d.IsDir() {
err = watcher.Add(path)
if err != nil {
- logging.Error("Error watching path", "path", path, "error", err)
+ slog.Error("Error watching path", "path", path, "error", err)
}
}
return nil
})
if err != nil {
- logging.Error("Error walking workspace", "error", err)
+ slog.Error("Error walking workspace", "error", err)
}
// Event loop
@@ -396,7 +397,7 @@ func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath str
// Skip excluded directories
if !shouldExcludeDir(event.Name) {
if err := watcher.Add(event.Name); err != nil {
- logging.Error("Error adding directory to watcher", "path", event.Name, "error", err)
+ slog.Error("Error adding directory to watcher", "path", event.Name, "error", err)
}
}
} else {
@@ -411,7 +412,7 @@ func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath str
// Debug logging
if cfg.Options.DebugLSP {
matched, kind := w.isPathWatched(event.Name)
- logging.Debug("File event",
+ slog.Debug("File event",
"path", event.Name,
"operation", event.Op.String(),
"watched", matched,
@@ -431,7 +432,7 @@ func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath str
// Just send the notification if needed
info, err := os.Stat(event.Name)
if err != nil {
- logging.Error("Error getting file info", "path", event.Name, "error", err)
+ slog.Error("Error getting file info", "path", event.Name, "error", err)
return
}
if !info.IsDir() && watchKind&protocol.WatchCreate != 0 {
@@ -459,7 +460,7 @@ func (w *WorkspaceWatcher) WatchWorkspace(ctx context.Context, workspacePath str
if !ok {
return
}
- logging.Error("Error watching file", "error", err)
+ slog.Error("Error watching file", "error", err)
}
}
}
@@ -584,7 +585,7 @@ func matchesSimpleGlob(pattern, path string) bool {
// Fall back to simple matching for simpler patterns
matched, err := filepath.Match(pattern, path)
if err != nil {
- logging.Error("Error matching pattern", "pattern", pattern, "path", path, "error", err)
+ slog.Error("Error matching pattern", "pattern", pattern, "path", path, "error", err)
return false
}
@@ -595,7 +596,7 @@ func matchesSimpleGlob(pattern, path string) bool {
func (w *WorkspaceWatcher) matchesPattern(path string, pattern protocol.GlobPattern) bool {
patternInfo, err := pattern.AsPattern()
if err != nil {
- logging.Error("Error parsing pattern", "pattern", pattern, "error", err)
+ slog.Error("Error parsing pattern", "pattern", pattern, "error", err)
return false
}
@@ -620,7 +621,7 @@ func (w *WorkspaceWatcher) matchesPattern(path string, pattern protocol.GlobPatt
// Make path relative to basePath for matching
relPath, err := filepath.Rel(basePath, path)
if err != nil {
- logging.Error("Error getting relative path", "path", path, "basePath", basePath, "error", err)
+ slog.Error("Error getting relative path", "path", path, "basePath", basePath, "error", err)
return false
}
relPath = filepath.ToSlash(relPath)
@@ -663,14 +664,14 @@ func (w *WorkspaceWatcher) handleFileEvent(ctx context.Context, uri string, chan
} else if changeType == protocol.FileChangeType(protocol.Changed) && w.client.IsFileOpen(filePath) {
err := w.client.NotifyChange(ctx, filePath)
if err != nil {
- logging.Error("Error notifying change", "error", err)
+ slog.Error("Error notifying change", "error", err)
}
return
}
// Notify LSP server about the file event using didChangeWatchedFiles
if err := w.notifyFileEvent(ctx, uri, changeType); err != nil {
- logging.Error("Error notifying LSP server about file event", "error", err)
+ slog.Error("Error notifying LSP server about file event", "error", err)
}
}
@@ -678,7 +679,7 @@ func (w *WorkspaceWatcher) handleFileEvent(ctx context.Context, uri string, chan
func (w *WorkspaceWatcher) notifyFileEvent(ctx context.Context, uri string, changeType protocol.FileChangeType) error {
cfg := config.Get()
if cfg.Options.DebugLSP {
- logging.Debug("Notifying file event",
+ slog.Debug("Notifying file event",
"uri", uri,
"changeType", changeType,
)
@@ -853,7 +854,7 @@ func shouldExcludeFile(filePath string) bool {
// Skip large files
if info.Size() > maxFileSize {
if cfg.Options.DebugLSP {
- logging.Debug("Skipping large file",
+ slog.Debug("Skipping large file",
"path", filePath,
"size", info.Size(),
"maxSize", maxFileSize,
@@ -891,10 +892,10 @@ func (w *WorkspaceWatcher) openMatchingFile(ctx context.Context, path string) {
// This helps with project initialization for certain language servers
if isHighPriorityFile(path, serverName) {
if cfg.Options.DebugLSP {
- logging.Debug("Opening high-priority file", "path", path, "serverName", serverName)
+ slog.Debug("Opening high-priority file", "path", path, "serverName", serverName)
}
if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
- logging.Error("Error opening high-priority file", "path", path, "error", err)
+ slog.Error("Error opening high-priority file", "path", path, "error", err)
}
return
}
@@ -906,7 +907,7 @@ func (w *WorkspaceWatcher) openMatchingFile(ctx context.Context, path string) {
// Check file size - for preloading we're more conservative
if info.Size() > (1 * 1024 * 1024) { // 1MB limit for preloaded files
if cfg.Options.DebugLSP {
- logging.Debug("Skipping large file for preloading", "path", path, "size", info.Size())
+ slog.Debug("Skipping large file for preloading", "path", path, "size", info.Size())
}
return
}
@@ -938,7 +939,7 @@ func (w *WorkspaceWatcher) openMatchingFile(ctx context.Context, path string) {
if shouldOpen {
// Don't need to check if it's already open - the client.OpenFile handles that
if err := w.client.OpenFile(ctx, path); err != nil && cfg.Options.DebugLSP {
- logging.Error("Error opening file", "path", path, "error", err)
+ slog.Error("Error opening file", "path", path, "error", err)
}
}
}
@@ -6,7 +6,6 @@ import (
"slices"
"sync"
- "github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/pubsub"
"github.com/google/uuid"
)
@@ -44,6 +43,7 @@ type Service interface {
type permissionService struct {
*pubsub.Broker[PermissionRequest]
+ workingDir string
sessionPermissions []PermissionRequest
sessionPermissionsMu sync.RWMutex
pendingRequests sync.Map
@@ -87,7 +87,7 @@ func (s *permissionService) Request(opts CreatePermissionRequest) bool {
dir := filepath.Dir(opts.Path)
if dir == "." {
- dir = config.WorkingDirectory()
+ dir = s.workingDir
}
permission := PermissionRequest{
ID: uuid.New().String(),
@@ -125,9 +125,10 @@ func (s *permissionService) AutoApproveSession(sessionID string) {
s.autoApproveSessionsMu.Unlock()
}
-func NewPermissionService() Service {
+func NewPermissionService(workingDir string) Service {
return &permissionService{
Broker: pubsub.NewBroker[PermissionRequest](),
+ workingDir: workingDir,
sessionPermissions: make([]PermissionRequest, 0),
}
}
@@ -1,9 +1,8 @@
package shell
import (
+ "log/slog"
"sync"
-
- "github.com/charmbracelet/crush/internal/logging"
)
// PersistentShell is a singleton shell instance that maintains state across the application
@@ -30,9 +29,9 @@ func GetPersistentShell(cwd string) *PersistentShell {
return shellInstance
}
-// loggingAdapter adapts the internal logging package to the Logger interface
+// loggingAdapter adapts the standard library slog package to the Logger interface
type loggingAdapter struct{}
func (l *loggingAdapter) InfoPersist(msg string, keysAndValues ...interface{}) {
- logging.InfoPersist(msg, keysAndValues...)
+ slog.Info(msg, keysAndValues...)
}
@@ -261,7 +261,7 @@ func (a Anim) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
// View renders the current state of the animation.
-func (a Anim) View() tea.View {
+func (a Anim) View() string {
var b strings.Builder
for i := range a.width {
switch {
@@ -284,7 +284,8 @@ func (a Anim) View() tea.View {
if a.initialized && a.labelWidth > 0 {
b.WriteString(a.ellipsisFrames[a.ellipsisStep/ellipsisAnimSpeed])
}
- return tea.NewView(b.String())
+
+ return b.String()
}
// Step is a command that triggers the next step in the animation.
@@ -50,20 +50,20 @@ func (m model) View() tea.View {
}
v := tea.NewView("")
- v.SetBackgroundColor(m.bgColor)
+ v.BackgroundColor = m.bgColor
if m.quitting {
return v
}
if a, ok := m.anim.(anim.Anim); ok {
- l := lipgloss.NewLayer(a.View().String()).
+ l := lipgloss.NewLayer(a.View()).
Width(a.Width()).
X(m.w/2 - a.Width()/2).
Y(m.h / 2)
v = tea.NewView(lipgloss.NewCanvas(l))
- v.SetBackgroundColor(m.bgColor)
+ v.BackgroundColor = m.bgColor
return v
}
return v
@@ -101,12 +101,10 @@ func (m *messageListCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
// View renders the message list or an initial screen if empty.
-func (m *messageListCmp) View() tea.View {
- return tea.NewView(
- lipgloss.JoinVertical(
- lipgloss.Left,
- m.listCmp.View().String(),
- ),
+func (m *messageListCmp) View() string {
+ return lipgloss.JoinVertical(
+ lipgloss.Left,
+ m.listCmp.View(),
)
}
@@ -14,7 +14,6 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/fsext"
- "github.com/charmbracelet/crush/internal/logging"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/session"
"github.com/charmbracelet/crush/internal/tui/components/chat"
@@ -153,8 +152,7 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, nil
case filepicker.FilePickedMsg:
if len(m.attachments) >= maxAttachments {
- logging.ErrorPersist(fmt.Sprintf("cannot add more than %d images", maxAttachments))
- return m, cmd
+ return m, util.ReportError(fmt.Errorf("cannot add more than %d images", maxAttachments))
}
m.attachments = append(m.attachments, msg.Attachment)
return m, nil
@@ -265,20 +263,22 @@ func (m *editorCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, tea.Batch(cmds...)
}
-func (m *editorCmp) View() tea.View {
- t := styles.CurrentTheme()
+func (m *editorCmp) Cursor() *tea.Cursor {
cursor := m.textarea.Cursor()
if cursor != nil {
cursor.X = cursor.X + m.x + 1
cursor.Y = cursor.Y + m.y + 1 // adjust for padding
}
+ return cursor
+}
+
+func (m *editorCmp) View() string {
+ t := styles.CurrentTheme()
if len(m.attachments) == 0 {
content := t.S().Base.Padding(1).Render(
m.textarea.View(),
)
- view := tea.NewView(content)
- view.SetCursor(cursor)
- return view
+ return content
}
content := t.S().Base.Padding(0, 1, 1, 1).Render(
lipgloss.JoinVertical(lipgloss.Top,
@@ -286,9 +286,7 @@ func (m *editorCmp) View() tea.View {
m.textarea.View(),
),
)
- view := tea.NewView(content)
- view.SetCursor(cursor)
- return view
+ return content
}
func (m *editorCmp) SetSize(width, height int) tea.Cmd {
@@ -57,9 +57,9 @@ func (p *header) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return p, nil
}
-func (p *header) View() tea.View {
+func (p *header) View() string {
if p.session.ID == "" {
- return tea.NewView("")
+ return ""
}
t := styles.CurrentTheme()
@@ -86,12 +86,12 @@ func (p *header) View() tea.View {
parts...,
),
)
- return tea.NewView(content)
+ return content
}
func (h *header) details() string {
t := styles.CurrentTheme()
- cwd := fsext.DirTrim(fsext.PrettyPath(config.WorkingDirectory()), 4)
+ cwd := fsext.DirTrim(fsext.PrettyPath(config.Get().WorkingDir()), 4)
parts := []string{
t.S().Muted.Render(cwd),
}
@@ -111,7 +111,8 @@ func (h *header) details() string {
parts = append(parts, t.S().Error.Render(fmt.Sprintf("%s%d", styles.ErrorIcon, errorCount)))
}
- model := config.GetAgentModel(config.AgentCoder)
+ agentCfg := config.Get().Agents["coder"]
+ model := config.Get().GetModelByType(agentCfg.Model)
percentage := (float64(h.session.CompletionTokens+h.session.PromptTokens) / float64(model.ContextWindow)) * 100
formattedPercentage := t.S().Muted.Render(fmt.Sprintf("%d%%", int(percentage)))
parts = append(parts, formattedPercentage)
@@ -10,7 +10,6 @@ import (
"github.com/charmbracelet/lipgloss/v2"
"github.com/charmbracelet/crush/internal/config"
- "github.com/charmbracelet/crush/internal/fur/provider"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/tui/components/anim"
"github.com/charmbracelet/crush/internal/tui/components/core"
@@ -89,20 +88,20 @@ func (m *messageCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// View renders the message component based on its current state.
// Returns different views for spinning, user, and assistant messages.
-func (m *messageCmp) View() tea.View {
+func (m *messageCmp) View() string {
if m.spinning {
- return tea.NewView(m.style().PaddingLeft(1).Render(m.anim.View().String()))
+ return m.style().PaddingLeft(1).Render(m.anim.View())
}
if m.message.ID != "" {
// this is a user or assistant message
switch m.message.Role {
case message.User:
- return tea.NewView(m.renderUserMessage())
+ return m.renderUserMessage()
default:
- return tea.NewView(m.renderAssistantMessage())
+ return m.renderAssistantMessage()
}
}
- return tea.NewView(m.style().Render("No message content"))
+ return m.style().Render("No message content")
}
// GetMessage returns the underlying message data
@@ -289,20 +288,18 @@ func (m *assistantSectionModel) Update(tea.Msg) (tea.Model, tea.Cmd) {
return m, nil
}
-func (m *assistantSectionModel) View() tea.View {
+func (m *assistantSectionModel) View() string {
t := styles.CurrentTheme()
finishData := m.message.FinishPart()
finishTime := time.Unix(finishData.Time, 0)
duration := finishTime.Sub(m.lastUserMessageTime)
infoMsg := t.S().Subtle.Render(duration.String())
icon := t.S().Subtle.Render(styles.ModelIcon)
- model := config.GetProviderModel(provider.InferenceProvider(m.message.Provider), m.message.Model)
- modelFormatted := t.S().Muted.Render(model.Name)
+ model := config.Get().GetModel(m.message.Provider, m.message.Model)
+ modelFormatted := t.S().Muted.Render(model.Model)
assistant := fmt.Sprintf("%s %s %s", icon, modelFormatted, infoMsg)
- return tea.NewView(
- t.S().Base.PaddingLeft(2).Render(
- core.Section(assistant, m.width-2),
- ),
+ return t.S().Base.PaddingLeft(2).Render(
+ core.Section(assistant, m.width-2),
)
}
@@ -7,10 +7,10 @@ import (
"time"
"github.com/charmbracelet/crush/internal/fsext"
- "github.com/charmbracelet/crush/internal/highlight"
"github.com/charmbracelet/crush/internal/llm/agent"
"github.com/charmbracelet/crush/internal/llm/tools"
"github.com/charmbracelet/crush/internal/tui/components/core"
+ "github.com/charmbracelet/crush/internal/tui/highlight"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/lipgloss/v2"
"github.com/charmbracelet/lipgloss/v2/tree"
@@ -542,7 +542,7 @@ func (tr agentRenderer) Render(v *toolCallCmp) string {
if v.result.ToolCallID == "" {
v.spinning = true
- parts = append(parts, v.anim.View().String())
+ parts = append(parts, v.anim.View())
} else {
v.spinning = false
}
@@ -145,19 +145,19 @@ func (m *toolCallCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// View renders the tool call component based on its current state.
// Shows either a pending animation or the tool-specific rendered result.
-func (m *toolCallCmp) View() tea.View {
+func (m *toolCallCmp) View() string {
box := m.style()
if !m.call.Finished && !m.cancelled {
- return tea.NewView(box.Render(m.renderPending()))
+ return box.Render(m.renderPending())
}
r := registry.lookup(m.call.Name)
if m.isNested {
- return tea.NewView(box.Render(r.Render(m)))
+ return box.Render(r.Render(m))
}
- return tea.NewView(box.Render(r.Render(m)))
+ return box.Render(r.Render(m))
}
// State management methods
@@ -13,7 +13,7 @@ import (
"github.com/charmbracelet/crush/internal/diff"
"github.com/charmbracelet/crush/internal/fsext"
"github.com/charmbracelet/crush/internal/history"
- "github.com/charmbracelet/crush/internal/logging"
+
"github.com/charmbracelet/crush/internal/lsp"
"github.com/charmbracelet/crush/internal/lsp/protocol"
"github.com/charmbracelet/crush/internal/pubsub"
@@ -94,7 +94,6 @@ func (m *sidebarCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
case chat.SessionClearedMsg:
m.session = session.Session{}
case pubsub.Event[history.File]:
- logging.Info("sidebar", "Received file history event", "file", msg.Payload.Path, "session", msg.Payload.SessionID)
return m, m.handleFileHistoryEvent(msg)
case pubsub.Event[session.Session]:
if msg.Type == pubsub.UpdatedEvent {
@@ -106,7 +105,7 @@ func (m *sidebarCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, nil
}
-func (m *sidebarCmp) View() tea.View {
+func (m *sidebarCmp) View() string {
t := styles.CurrentTheme()
parts := []string{}
if !m.compactMode {
@@ -138,9 +137,7 @@ func (m *sidebarCmp) View() tea.View {
m.mcpBlock(),
)
- return tea.NewView(
- lipgloss.JoinVertical(lipgloss.Left, parts...),
- )
+ return lipgloss.JoinVertical(lipgloss.Left, parts...)
}
func (m *sidebarCmp) handleFileHistoryEvent(event pubsub.Event[history.File]) tea.Cmd {
@@ -161,6 +158,8 @@ func (m *sidebarCmp) handleFileHistoryEvent(event pubsub.Event[history.File]) te
before := existing.History.initialVersion.Content
after := existing.History.latestVersion.Content
path := existing.History.initialVersion.Path
+ cwd := config.Get().WorkingDir()
+ path = strings.TrimPrefix(path, cwd)
_, additions, deletions := diff.GenerateDiff(before, after, path)
existing.Additions = additions
existing.Deletions = deletions
@@ -214,7 +213,9 @@ func (m *sidebarCmp) loadSessionFiles() tea.Msg {
sessionFiles := make([]SessionFile, 0, len(fileMap))
for path, fh := range fileMap {
- _, additions, deletions := diff.GenerateDiff(fh.initialVersion.Content, fh.latestVersion.Content, fh.initialVersion.Path)
+ cwd := config.Get().WorkingDir()
+ path = strings.TrimPrefix(path, cwd)
+ _, additions, deletions := diff.GenerateDiff(fh.initialVersion.Content, fh.latestVersion.Content, path)
sessionFiles = append(sessionFiles, SessionFile{
History: fh,
FilePath: path,
@@ -297,7 +298,7 @@ func (m *sidebarCmp) filesBlock() string {
}
extraContent := strings.Join(statusParts, " ")
- cwd := config.WorkingDirectory() + string(os.PathSeparator)
+ cwd := config.Get().WorkingDir() + string(os.PathSeparator)
filePath := file.FilePath
filePath = strings.TrimPrefix(filePath, cwd)
filePath = fsext.DirTrim(fsext.PrettyPath(filePath), 2)
@@ -474,12 +475,13 @@ func formatTokensAndCost(tokens, contextWindow int64, cost float64) string {
}
func (s *sidebarCmp) currentModelBlock() string {
- model := config.GetAgentModel(config.AgentCoder)
+ agentCfg := config.Get().Agents["coder"]
+ model := config.Get().GetModelByType(agentCfg.Model)
t := styles.CurrentTheme()
modelIcon := t.S().Base.Foreground(t.FgSubtle).Render(styles.ModelIcon)
- modelName := t.S().Text.Render(model.Name)
+ modelName := t.S().Text.Render(model.Model)
modelInfo := fmt.Sprintf("%s %s", modelIcon, modelName)
parts := []string{
modelInfo,
@@ -507,7 +509,7 @@ func (m *sidebarCmp) SetSession(session session.Session) tea.Cmd {
}
func cwd() string {
- cwd := config.WorkingDirectory()
+ cwd := config.Get().WorkingDir()
t := styles.CurrentTheme()
// Replace home directory with ~, unless we're at the top level of the
// home directory).
@@ -60,9 +60,9 @@ func (s *splashCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
// View implements SplashPage.
-func (s *splashCmp) View() tea.View {
+func (s *splashCmp) View() string {
content := lipgloss.JoinVertical(lipgloss.Left, s.logoRendered)
- return tea.NewView(content)
+ return content
}
func (s *splashCmp) logoBlock() string {
@@ -157,15 +157,12 @@ func (c *completionsCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
// View implements Completions.
-func (c *completionsCmp) View() tea.View {
+func (c *completionsCmp) View() string {
if len(c.list.Items()) == 0 {
- return tea.NewView(c.style().Render("No completions found"))
+ return c.style().Render("No completions found")
}
- view := tea.NewView(
- c.style().Render(c.list.View().String()),
- )
- return view
+ return c.style().Render(c.list.View())
}
func (c *completionsCmp) style() lipgloss.Style {
@@ -75,7 +75,7 @@ func (c *completionItemCmp) Update(tea.Msg) (tea.Model, tea.Cmd) {
}
// View implements CommandItem.
-func (c *completionItemCmp) View() tea.View {
+func (c *completionItemCmp) View() string {
t := styles.CurrentTheme()
itemStyle := t.S().Base.Padding(0, 1).Width(c.width)
@@ -135,7 +135,7 @@ func (c *completionItemCmp) View() tea.View {
parts...,
),
)
- return tea.NewView(item)
+ return item
}
// Blur implements CommandItem.
@@ -5,7 +5,7 @@ import (
"strings"
"github.com/alecthomas/chroma/v2"
- "github.com/charmbracelet/crush/internal/exp/diffview"
+ "github.com/charmbracelet/crush/internal/tui/exp/diffview"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/lipgloss/v2"
"github.com/charmbracelet/x/ansi"
@@ -72,7 +72,14 @@ func (c *container) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
}
-func (c *container) View() tea.View {
+// Cursor returns the embedded content's cursor when the content model
+// implements util.Cursor; otherwise it returns nil (no cursor shown).
+func (c *container) Cursor() *tea.Cursor {
+ if cursor, ok := c.content.(util.Cursor); ok {
+ return cursor.Cursor()
+ }
+ return nil
+}
+
+func (c *container) View() string {
t := styles.CurrentTheme()
width := c.width
height := c.height
@@ -106,10 +113,7 @@ func (c *container) View() tea.View {
PaddingLeft(c.paddingLeft)
contentView := c.content.View()
- view := tea.NewView(style.Render(contentView.String()))
- cursor := contentView.Cursor()
- view.SetCursor(cursor)
- return view
+ return style.Render(contentView)
}
func (c *container) SetSize(width, height int) tea.Cmd {
@@ -1,9 +1,11 @@
package layout
import (
+ "log/slog"
+
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
- "github.com/charmbracelet/crush/internal/logging"
+
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
"github.com/charmbracelet/lipgloss/v2"
@@ -104,17 +106,34 @@ func (s *splitPaneLayout) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return s, tea.Batch(cmds...)
}
-func (s *splitPaneLayout) View() tea.View {
+// Cursor returns the cursor of the first non-nil panel in priority order
+// bottom → right → left. Note only that one panel is consulted: if it does
+// not implement util.Cursor, nil is returned without checking the others.
+func (s *splitPaneLayout) Cursor() *tea.Cursor {
+ if s.bottomPanel != nil {
+ if c, ok := s.bottomPanel.(util.Cursor); ok {
+ return c.Cursor()
+ }
+ } else if s.rightPanel != nil {
+ if c, ok := s.rightPanel.(util.Cursor); ok {
+ return c.Cursor()
+ }
+ } else if s.leftPanel != nil {
+ if c, ok := s.leftPanel.(util.Cursor); ok {
+ return c.Cursor()
+ }
+ }
+ return nil
+}
+
+func (s *splitPaneLayout) View() string {
var topSection string
if s.leftPanel != nil && s.rightPanel != nil {
leftView := s.leftPanel.View()
rightView := s.rightPanel.View()
- topSection = lipgloss.JoinHorizontal(lipgloss.Top, leftView.String(), rightView.String())
+ topSection = lipgloss.JoinHorizontal(lipgloss.Top, leftView, rightView)
} else if s.leftPanel != nil {
- topSection = s.leftPanel.View().String()
+ topSection = s.leftPanel.View()
} else if s.rightPanel != nil {
- topSection = s.rightPanel.View().String()
+ topSection = s.rightPanel.View()
} else {
topSection = ""
}
@@ -123,38 +142,26 @@ func (s *splitPaneLayout) View() tea.View {
if s.bottomPanel != nil && topSection != "" {
bottomView := s.bottomPanel.View()
- finalView = lipgloss.JoinVertical(lipgloss.Left, topSection, bottomView.String())
+ finalView = lipgloss.JoinVertical(lipgloss.Left, topSection, bottomView)
} else if s.bottomPanel != nil {
- finalView = s.bottomPanel.View().String()
+ finalView = s.bottomPanel.View()
} else {
finalView = topSection
}
- // TODO: think of a better way to handle multiple cursors
- var cursor *tea.Cursor
- if s.bottomPanel != nil {
- cursor = s.bottomPanel.View().Cursor()
- } else if s.rightPanel != nil {
- cursor = s.rightPanel.View().Cursor()
- } else if s.leftPanel != nil {
- cursor = s.leftPanel.View().Cursor()
- }
-
t := styles.CurrentTheme()
style := t.S().Base.
Width(s.width).
Height(s.height)
- view := tea.NewView(style.Render(finalView))
- view.SetCursor(cursor)
- return view
+ return style.Render(finalView)
}
func (s *splitPaneLayout) SetSize(width, height int) tea.Cmd {
s.width = width
s.height = height
- logging.Info("Setting split pane size", "width", width, "height", height)
+ slog.Info("Setting split pane size", "width", width, "height", height)
var topHeight, bottomHeight int
var cmds []tea.Cmd
@@ -280,12 +280,20 @@ func (m *model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, nil
}
+// Cursor returns the filter input's cursor, but only when the list is
+// filterable and the filter input is visible; otherwise it returns nil.
+func (m *model) Cursor() *tea.Cursor {
+ if m.filterable && !m.hideFilterInput {
+ return m.input.Cursor()
+ }
+ return nil
+}
+
// View renders the list to a string for display.
// Returns empty string if the list has no dimensions.
// Triggers re-rendering if needed before returning content.
-func (m *model) View() tea.View {
+func (m *model) View() string {
if m.viewState.height == 0 || m.viewState.width == 0 {
- return tea.NewView("") // No content to display
+ return "" // No content to display
}
if m.renderState.needsRerender {
m.renderVisible()
@@ -303,11 +311,7 @@ func (m *model) View() tea.View {
content,
)
}
- view := tea.NewView(content)
- if m.filterable && !m.hideFilterInput {
- view.SetCursor(m.input.Cursor())
- }
- return view
+ return content
}
// handleKeyPress processes keyboard input for list navigation.
@@ -833,7 +837,7 @@ func (m *model) rerenderItem(inx int) {
func (m *model) getItemLines(item util.Model) []string {
var itemLines []string
- itemLines = strings.Split(item.View().String(), "\n")
+ itemLines = strings.Split(item.View(), "\n")
if m.gapSize > 0 {
gap := make([]string, m.gapSize)
@@ -1261,7 +1265,7 @@ func (m *model) filterSection(sect section, search string) *section {
// Check if section header itself matches
if sect.header != nil {
- headerText := strings.ToLower(sect.header.View().String())
+ headerText := strings.ToLower(sect.header.View())
if strings.Contains(headerText, search) {
hasHeaderMatch = true
// If header matches, include all items in the section
@@ -6,8 +6,6 @@ import (
"github.com/charmbracelet/bubbles/v2/help"
tea "github.com/charmbracelet/bubbletea/v2"
- "github.com/charmbracelet/crush/internal/logging"
- "github.com/charmbracelet/crush/internal/pubsub"
"github.com/charmbracelet/crush/internal/session"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
@@ -58,49 +56,17 @@ func (m *statusCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, m.clearMessageCmd(ttl)
case util.ClearStatusMsg:
m.info = util.InfoMsg{}
-
- // Handle persistent logs
- case pubsub.Event[logging.LogMessage]:
- if msg.Payload.Persist {
- switch msg.Payload.Level {
- case "error":
- m.info = util.InfoMsg{
- Type: util.InfoTypeError,
- Msg: msg.Payload.Message,
- TTL: msg.Payload.PersistTime,
- }
- case "info":
- m.info = util.InfoMsg{
- Type: util.InfoTypeInfo,
- Msg: msg.Payload.Message,
- TTL: msg.Payload.PersistTime,
- }
- case "warn":
- m.info = util.InfoMsg{
- Type: util.InfoTypeWarn,
- Msg: msg.Payload.Message,
- TTL: msg.Payload.PersistTime,
- }
- default:
- m.info = util.InfoMsg{
- Type: util.InfoTypeInfo,
- Msg: msg.Payload.Message,
- TTL: msg.Payload.PersistTime,
- }
- }
- return m, m.clearMessageCmd(m.info.TTL)
- }
}
return m, nil
}
-func (m *statusCmp) View() tea.View {
+func (m *statusCmp) View() string {
t := styles.CurrentTheme()
status := t.S().Base.Padding(0, 1, 1, 1).Render(m.help.View(m.keyMap))
if m.info.Msg != "" {
status = m.infoMsg()
}
- return tea.NewView(status)
+ return status
}
func (m *statusCmp) infoMsg() string {
@@ -139,7 +139,7 @@ func (c *commandArgumentsDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
// View implements CommandArgumentsDialog.
-func (c *commandArgumentsDialogCmp) View() tea.View {
+func (c *commandArgumentsDialogCmp) View() string {
t := styles.CurrentTheme()
baseStyle := t.S().Base
@@ -188,19 +188,19 @@ func (c *commandArgumentsDialogCmp) View() tea.View {
elements...,
)
- view := tea.NewView(
- baseStyle.Padding(1, 1, 0, 1).
- Border(lipgloss.RoundedBorder()).
- BorderForeground(t.BorderFocus).
- Width(c.width).
- Render(content),
- )
+ return baseStyle.Padding(1, 1, 0, 1).
+ Border(lipgloss.RoundedBorder()).
+ BorderForeground(t.BorderFocus).
+ Width(c.width).
+ Render(content)
+}
+
+func (c *commandArgumentsDialogCmp) Cursor() *tea.Cursor {
cursor := c.inputs[c.focusIndex].Cursor()
if cursor != nil {
cursor = c.moveCursor(cursor)
}
- view.SetCursor(cursor)
- return view
+ return cursor
}
func (c *commandArgumentsDialogCmp) moveCursor(cursor *tea.Cursor) *tea.Cursor {
@@ -143,23 +143,29 @@ func (c *commandDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return c, nil
}
-func (c *commandDialogCmp) View() tea.View {
+func (c *commandDialogCmp) View() string {
t := styles.CurrentTheme()
- listView := c.commandList.View()
+ listView := c.commandList
radio := c.commandTypeRadio()
content := lipgloss.JoinVertical(
lipgloss.Left,
t.S().Base.Padding(0, 1, 1, 1).Render(core.Title("Commands", c.width-lipgloss.Width(radio)-5)+" "+radio),
- listView.String(),
+ listView.View(),
"",
t.S().Base.Width(c.width-2).PaddingLeft(1).AlignHorizontal(lipgloss.Left).Render(c.help.View(c.keyMap)),
)
- v := tea.NewView(c.style().Render(content))
- if listView.Cursor() != nil {
- c := c.moveCursor(listView.Cursor())
- v.SetCursor(c)
+ return c.style().Render(content)
+}
+
+func (c *commandDialogCmp) Cursor() *tea.Cursor {
+ if cursor, ok := c.commandList.(util.Cursor); ok {
+ cursor := cursor.Cursor()
+ if cursor != nil {
+ cursor = c.moveCursor(cursor)
+ }
+ return cursor
}
- return v
+ return nil
}
func (c *commandDialogCmp) commandTypeRadio() string {
@@ -35,12 +35,12 @@ func (m *itemSectionModel) Update(tea.Msg) (tea.Model, tea.Cmd) {
return m, nil
}
-func (m *itemSectionModel) View() tea.View {
+func (m *itemSectionModel) View() string {
t := styles.CurrentTheme()
title := ansi.Truncate(m.title, m.width-2, "…")
style := t.S().Base.Padding(1, 1, 0, 1)
title = t.S().Muted.Render(title)
- return tea.NewView(style.Render(core.Section(title, m.width-2)))
+ return style.Render(core.Section(title, m.width-2))
}
func (m *itemSectionModel) GetSize() (int, int) {
@@ -242,8 +242,8 @@ func (c *compactDialogCmp) render() string {
Render(dialogContent)
}
-func (c *compactDialogCmp) View() tea.View {
- return tea.NewView(c.render())
+func (c *compactDialogCmp) View() string {
+ return c.render()
}
// SetSize sets the size of the component.
@@ -37,7 +37,7 @@ type DialogCmp interface {
Dialogs() []DialogModel
HasDialogs() bool
GetLayers() []*lipgloss.Layer
- ActiveView() *tea.View
+ ActiveModel() util.Model
ActiveDialogID() DialogID
}
@@ -132,12 +132,11 @@ func (d dialogCmp) Dialogs() []DialogModel {
return d.dialogs
}
-func (d dialogCmp) ActiveView() *tea.View {
+func (d dialogCmp) ActiveModel() util.Model {
if len(d.dialogs) == 0 {
return nil
}
- view := d.dialogs[len(d.dialogs)-1].View()
- return &view
+ return d.dialogs[len(d.dialogs)-1]
}
func (d dialogCmp) ActiveDialogID() DialogID {
@@ -150,7 +149,7 @@ func (d dialogCmp) ActiveDialogID() DialogID {
func (d dialogCmp) GetLayers() []*lipgloss.Layer {
layers := []*lipgloss.Layer{}
for _, dialog := range d.Dialogs() {
- dialogView := dialog.View().String()
+ dialogView := dialog.View()
row, col := dialog.Position()
layers = append(layers, lipgloss.NewLayer(dialogView).X(col).Y(row))
}
@@ -11,7 +11,6 @@ import (
"github.com/charmbracelet/bubbles/v2/help"
"github.com/charmbracelet/bubbles/v2/key"
tea "github.com/charmbracelet/bubbletea/v2"
- "github.com/charmbracelet/crush/internal/logging"
"github.com/charmbracelet/crush/internal/message"
"github.com/charmbracelet/crush/internal/tui/components/core"
"github.com/charmbracelet/crush/internal/tui/components/dialogs"
@@ -119,18 +118,15 @@ func (m *model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
func() tea.Msg {
isFileLarge, err := ValidateFileSize(path, maxAttachmentSize)
if err != nil {
- logging.ErrorPersist("unable to read the image")
- return nil
+ return util.ReportError(fmt.Errorf("unable to read the image: %w", err))
}
if isFileLarge {
- logging.ErrorPersist("file too large, max 5MB")
- return nil
+ return util.ReportError(fmt.Errorf("file too large, max 5MB"))
}
content, err := os.ReadFile(path)
if err != nil {
- logging.ErrorPersist("Unable read selected file")
- return nil
+ return util.ReportError(fmt.Errorf("unable to read the image: %w", err))
}
mimeBufferSize := min(512, len(content))
@@ -148,7 +144,7 @@ func (m *model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, tea.Batch(cmds...)
}
-func (m *model) View() tea.View {
+func (m *model) View() string {
t := styles.CurrentTheme()
content := lipgloss.JoinVertical(
@@ -158,7 +154,7 @@ func (m *model) View() tea.View {
m.filePicker.View(),
t.S().Base.Width(m.width-2).PaddingLeft(1).AlignHorizontal(lipgloss.Left).Render(m.help.View(m.keyMap)),
)
- return tea.NewView(m.style().Render(content))
+ return m.style().Render(content)
}
func (m *model) currentImage() string {
@@ -147,8 +147,8 @@ func (m *initDialogCmp) render() string {
}
// View implements tea.Model.
-func (m *initDialogCmp) View() tea.View {
- return tea.NewView(m.render())
+func (m *initDialogCmp) View() string {
+ return m.render()
}
// SetSize sets the size of the component.
@@ -31,8 +31,8 @@ const (
// ModelSelectedMsg is sent when a model is selected
type ModelSelectedMsg struct {
- Model config.PreferredModel
- ModelType config.ModelType
+ Model config.SelectedModel
+ ModelType config.SelectedModelType
}
// CloseModelDialogMsg is sent when a model is selected
@@ -115,19 +115,19 @@ func (m *modelDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
items := m.modelList.Items()
selectedItem := items[selectedItemInx].(completions.CompletionItem).Value().(ModelOption)
- var modelType config.ModelType
+ var modelType config.SelectedModelType
if m.modelType == LargeModelType {
- modelType = config.LargeModel
+ modelType = config.SelectedModelTypeLarge
} else {
- modelType = config.SmallModel
+ modelType = config.SelectedModelTypeSmall
}
return m, tea.Sequence(
util.CmdHandler(dialogs.CloseDialogMsg{}),
util.CmdHandler(ModelSelectedMsg{
- Model: config.PreferredModel{
- ModelID: selectedItem.Model.ID,
- Provider: selectedItem.Provider.ID,
+ Model: config.SelectedModel{
+ Model: selectedItem.Model.ID,
+ Provider: string(selectedItem.Provider.ID),
},
ModelType: modelType,
}),
@@ -149,23 +149,29 @@ func (m *modelDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return m, nil
}
-func (m *modelDialogCmp) View() tea.View {
+func (m *modelDialogCmp) View() string {
t := styles.CurrentTheme()
listView := m.modelList.View()
radio := m.modelTypeRadio()
content := lipgloss.JoinVertical(
lipgloss.Left,
t.S().Base.Padding(0, 1, 1, 1).Render(core.Title("Switch Model", m.width-lipgloss.Width(radio)-5)+" "+radio),
- listView.String(),
+ listView,
"",
t.S().Base.Width(m.width-2).PaddingLeft(1).AlignHorizontal(lipgloss.Left).Render(m.help.View(m.keyMap)),
)
- v := tea.NewView(m.style().Render(content))
- if listView.Cursor() != nil {
- c := m.moveCursor(listView.Cursor())
- v.SetCursor(c)
+ return m.style().Render(content)
+}
+
+func (m *modelDialogCmp) Cursor() *tea.Cursor {
+ if cursor, ok := m.modelList.(util.Cursor); ok {
+ cursor := cursor.Cursor()
+ if cursor != nil {
+ cursor = m.moveCursor(cursor)
+ return cursor
+ }
}
- return v
+ return nil
}
func (m *modelDialogCmp) style() lipgloss.Style {
@@ -218,35 +224,39 @@ func (m *modelDialogCmp) modelTypeRadio() string {
func (m *modelDialogCmp) SetModelType(modelType int) tea.Cmd {
m.modelType = modelType
- providers := config.Providers()
+ providers, err := config.Providers()
+ if err != nil {
+ return util.ReportError(err)
+ }
+
modelItems := []util.Model{}
selectIndex := 0
cfg := config.Get()
- var currentModel config.PreferredModel
+ var currentModel config.SelectedModel
if m.modelType == LargeModelType {
- currentModel = cfg.Models.Large
+ currentModel = cfg.Models[config.SelectedModelTypeLarge]
} else {
- currentModel = cfg.Models.Small
+ currentModel = cfg.Models[config.SelectedModelTypeSmall]
}
// Create a map to track which providers we've already added
- addedProviders := make(map[provider.InferenceProvider]bool)
+ addedProviders := make(map[string]bool)
// First, add any configured providers that are not in the known providers list
// These should appear at the top of the list
knownProviders := provider.KnownProviders()
for providerID, providerConfig := range cfg.Providers {
- if providerConfig.Disabled {
+ if providerConfig.Disable {
continue
}
// Check if this provider is not in the known providers list
- if !slices.Contains(knownProviders, providerID) {
+ if !slices.Contains(knownProviders, provider.InferenceProvider(providerID)) {
// Convert config provider to provider.Provider format
configProvider := provider.Provider{
Name: string(providerID), // Use provider ID as name for unknown providers
- ID: providerID,
+ ID: provider.InferenceProvider(providerID),
Models: make([]provider.Model, len(providerConfig.Models)),
}
@@ -254,7 +264,7 @@ func (m *modelDialogCmp) SetModelType(modelType int) tea.Cmd {
for i, model := range providerConfig.Models {
configProvider.Models[i] = provider.Model{
ID: model.ID,
- Name: model.Name,
+ Model: model.Model,
CostPer1MIn: model.CostPer1MIn,
CostPer1MOut: model.CostPer1MOut,
CostPer1MInCached: model.CostPer1MInCached,
@@ -263,7 +273,7 @@ func (m *modelDialogCmp) SetModelType(modelType int) tea.Cmd {
DefaultMaxTokens: model.DefaultMaxTokens,
CanReason: model.CanReason,
HasReasoningEffort: model.HasReasoningEffort,
- DefaultReasoningEffort: model.ReasoningEffort,
+ DefaultReasoningEffort: model.DefaultReasoningEffort,
SupportsImages: model.SupportsImages,
}
}
@@ -275,11 +285,11 @@ func (m *modelDialogCmp) SetModelType(modelType int) tea.Cmd {
}
modelItems = append(modelItems, commands.NewItemSection(name))
for _, model := range configProvider.Models {
- modelItems = append(modelItems, completions.NewCompletionItem(model.Name, ModelOption{
+ modelItems = append(modelItems, completions.NewCompletionItem(model.Model, ModelOption{
Provider: configProvider,
Model: model,
}))
- if model.ID == currentModel.ModelID && configProvider.ID == currentModel.Provider {
+ if model.ID == currentModel.Model && string(configProvider.ID) == currentModel.Provider {
selectIndex = len(modelItems) - 1 // Set the selected index to the current model
}
}
@@ -290,12 +300,12 @@ func (m *modelDialogCmp) SetModelType(modelType int) tea.Cmd {
// Then add the known providers from the predefined list
for _, provider := range providers {
// Skip if we already added this provider as an unknown provider
- if addedProviders[provider.ID] {
+ if addedProviders[string(provider.ID)] {
continue
}
// Check if this provider is configured and not disabled
- if providerConfig, exists := cfg.Providers[provider.ID]; exists && providerConfig.Disabled {
+ if providerConfig, exists := cfg.Providers[string(provider.ID)]; exists && providerConfig.Disable {
continue
}
@@ -305,11 +315,11 @@ func (m *modelDialogCmp) SetModelType(modelType int) tea.Cmd {
}
modelItems = append(modelItems, commands.NewItemSection(name))
for _, model := range provider.Models {
- modelItems = append(modelItems, completions.NewCompletionItem(model.Name, ModelOption{
+ modelItems = append(modelItems, completions.NewCompletionItem(model.Model, ModelOption{
Provider: provider,
Model: model,
}))
- if model.ID == currentModel.ModelID && provider.ID == currentModel.Provider {
+ if model.ID == currentModel.Model && string(provider.ID) == currentModel.Provider {
selectIndex = len(modelItems) - 1 // Set the selected index to the current model
}
}
@@ -478,8 +478,8 @@ func (p *permissionDialogCmp) render() string {
)
}
-func (p *permissionDialogCmp) View() tea.View {
- return tea.NewView(p.render())
+func (p *permissionDialogCmp) View() string {
+ return p.render()
}
func (p *permissionDialogCmp) SetSize() tea.Cmd {
@@ -65,7 +65,7 @@ func (q *quitDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
}
// View renders the quit dialog with Yes/No buttons.
-func (q *quitDialogCmp) View() tea.View {
+func (q *quitDialogCmp) View() string {
t := styles.CurrentTheme()
baseStyle := t.S().Base
yesStyle := t.S().Text
@@ -100,9 +100,7 @@ func (q *quitDialogCmp) View() tea.View {
Border(lipgloss.RoundedBorder()).
BorderForeground(t.BorderFocus)
- return tea.NewView(
- quitDialogStyle.Render(content),
- )
+ return quitDialogStyle.Render(content)
}
func (q *quitDialogCmp) Position() (int, int) {
@@ -122,23 +122,29 @@ func (s *sessionDialogCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
return s, nil
}
-func (s *sessionDialogCmp) View() tea.View {
+func (s *sessionDialogCmp) View() string {
t := styles.CurrentTheme()
listView := s.sessionsList.View()
content := lipgloss.JoinVertical(
lipgloss.Left,
t.S().Base.Padding(0, 1, 1, 1).Render(core.Title("Switch Session", s.width-4)),
- listView.String(),
+ listView,
"",
t.S().Base.Width(s.width-2).PaddingLeft(1).AlignHorizontal(lipgloss.Left).Render(s.help.View(s.keyMap)),
)
- v := tea.NewView(s.style().Render(content))
- if listView.Cursor() != nil {
- c := s.moveCursor(listView.Cursor())
- v.SetCursor(c)
+ return s.style().Render(content)
+}
+
+func (s *sessionDialogCmp) Cursor() *tea.Cursor {
+ if cursor, ok := s.sessionsList.(util.Cursor); ok {
+ cursor := cursor.Cursor()
+ if cursor != nil {
+ cursor = s.moveCursor(cursor)
+ }
+ return cursor
}
- return v
+ return nil
}
func (s *sessionDialogCmp) style() lipgloss.Style {
@@ -1,176 +0,0 @@
-package logs
-
-import (
- "fmt"
- "strings"
- "time"
-
- "github.com/charmbracelet/bubbles/v2/viewport"
- tea "github.com/charmbracelet/bubbletea/v2"
- "github.com/charmbracelet/crush/internal/logging"
- "github.com/charmbracelet/crush/internal/tui/components/core/layout"
- "github.com/charmbracelet/crush/internal/tui/styles"
- "github.com/charmbracelet/crush/internal/tui/util"
- "github.com/charmbracelet/lipgloss/v2"
-)
-
-type DetailComponent interface {
- util.Model
- layout.Sizeable
-}
-
-type detailCmp struct {
- width, height int
- currentLog logging.LogMessage
- viewport viewport.Model
-}
-
-func (i *detailCmp) Init() tea.Cmd {
- messages := logging.List()
- if len(messages) == 0 {
- return nil
- }
- i.currentLog = messages[0]
- return nil
-}
-
-func (i *detailCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
- switch msg := msg.(type) {
- case selectedLogMsg:
- if msg.ID != i.currentLog.ID {
- i.currentLog = logging.LogMessage(msg)
- i.updateContent()
- }
- }
-
- return i, nil
-}
-
-func (i *detailCmp) updateContent() {
- var content strings.Builder
- t := styles.CurrentTheme()
-
- if i.currentLog.ID == "" {
- content.WriteString(t.S().Muted.Render("No log selected"))
- i.viewport.SetContent(content.String())
- return
- }
-
- // Level badge with background color
- levelStyle := getLevelStyle(i.currentLog.Level)
- levelBadge := levelStyle.Padding(0, 1).Render(strings.ToUpper(i.currentLog.Level))
-
- // Timestamp with relative time
- timeStr := i.currentLog.Time.Format("2006-01-05 15:04:05 UTC")
- relativeTime := getRelativeTime(i.currentLog.Time)
- timeStyle := t.S().Muted
-
- // Header line
- header := lipgloss.JoinHorizontal(
- lipgloss.Left,
- timeStr,
- " ",
- timeStyle.Render(relativeTime),
- )
-
- content.WriteString(levelBadge)
- content.WriteString("\n\n")
- content.WriteString(header)
- content.WriteString("\n\n")
-
- // Message section
- messageHeaderStyle := t.S().Base.Foreground(t.Blue).Bold(true)
- content.WriteString(messageHeaderStyle.Render("Message"))
- content.WriteString("\n")
- content.WriteString(i.currentLog.Message)
- content.WriteString("\n\n")
-
- // Attributes section
- if len(i.currentLog.Attributes) > 0 {
- attrHeaderStyle := t.S().Base.Foreground(t.Blue).Bold(true)
- content.WriteString(attrHeaderStyle.Render("Attributes"))
- content.WriteString("\n")
-
- for _, attr := range i.currentLog.Attributes {
- keyStyle := t.S().Base.Foreground(t.Accent)
- valueStyle := t.S().Text
- attrLine := fmt.Sprintf("%s: %s",
- keyStyle.Render(attr.Key),
- valueStyle.Render(attr.Value),
- )
- content.WriteString(attrLine)
- content.WriteString("\n")
- }
- }
-
- i.viewport.SetContent(content.String())
-}
-
-func getLevelStyle(level string) lipgloss.Style {
- t := styles.CurrentTheme()
- style := t.S().Base.Bold(true)
-
- switch strings.ToLower(level) {
- case "info":
- return style.Foreground(t.White).Background(t.Info)
- case "warn", "warning":
- return style.Foreground(t.White).Background(t.Warning)
- case "error", "err":
- return style.Foreground(t.White).Background(t.Error)
- case "debug":
- return style.Foreground(t.White).Background(t.Success)
- case "fatal":
- return style.Foreground(t.White).Background(t.Error)
- default:
- return style.Foreground(t.FgBase)
- }
-}
-
-func getRelativeTime(logTime time.Time) string {
- now := time.Now()
- diff := now.Sub(logTime)
-
- if diff < time.Minute {
- return fmt.Sprintf("%ds ago", int(diff.Seconds()))
- } else if diff < time.Hour {
- return fmt.Sprintf("%dm ago", int(diff.Minutes()))
- } else if diff < 24*time.Hour {
- return fmt.Sprintf("%dh ago", int(diff.Hours()))
- } else if diff < 30*24*time.Hour {
- return fmt.Sprintf("%dd ago", int(diff.Hours()/24))
- } else if diff < 365*24*time.Hour {
- return fmt.Sprintf("%dmo ago", int(diff.Hours()/(24*30)))
- } else {
- return fmt.Sprintf("%dy ago", int(diff.Hours()/(24*365)))
- }
-}
-
-func (i *detailCmp) View() tea.View {
- t := styles.CurrentTheme()
- style := t.S().Base.
- BorderStyle(lipgloss.RoundedBorder()).
- BorderForeground(t.BorderFocus).
- Width(i.width - 2). // Adjust width for border
- Height(i.height - 2). // Adjust height for border
- Padding(1)
- return tea.NewView(style.Render(i.viewport.View()))
-}
-
-func (i *detailCmp) GetSize() (int, int) {
- return i.width, i.height
-}
-
-func (i *detailCmp) SetSize(width int, height int) tea.Cmd {
- i.width = width
- i.height = height
- i.viewport.SetWidth(i.width - 4)
- i.viewport.SetHeight(i.height - 4)
- i.updateContent()
- return nil
-}
-
-func NewLogsDetails() DetailComponent {
- return &detailCmp{
- viewport: viewport.New(),
- }
-}
@@ -1,197 +0,0 @@
-package logs
-
-import (
- "fmt"
- "slices"
- "strings"
-
- "github.com/charmbracelet/bubbles/v2/table"
- tea "github.com/charmbracelet/bubbletea/v2"
- "github.com/charmbracelet/crush/internal/logging"
- "github.com/charmbracelet/crush/internal/pubsub"
- "github.com/charmbracelet/crush/internal/tui/components/core/layout"
- "github.com/charmbracelet/crush/internal/tui/styles"
- "github.com/charmbracelet/crush/internal/tui/util"
- "github.com/charmbracelet/lipgloss/v2"
-)
-
-type TableComponent interface {
- util.Model
- layout.Sizeable
-}
-
-type tableCmp struct {
- table table.Model
- logs []logging.LogMessage
-}
-
-type selectedLogMsg logging.LogMessage
-
-func (i *tableCmp) Init() tea.Cmd {
- i.logs = logging.List()
- i.setRows()
- return nil
-}
-
-func (i *tableCmp) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
- var cmds []tea.Cmd
- switch msg := msg.(type) {
- case pubsub.Event[logging.LogMessage]:
- return i, func() tea.Msg {
- if msg.Type == pubsub.CreatedEvent {
- rows := i.table.Rows()
- for _, row := range rows {
- if row[1] == msg.Payload.ID {
- return nil // If the log already exists, do not add it again
- }
- }
- i.logs = append(i.logs, msg.Payload)
- i.table.SetRows(
- append(
- []table.Row{
- logToRow(msg.Payload),
- },
- i.table.Rows()...,
- ),
- )
- }
- return selectedLogMsg(msg.Payload)
- }
- }
- t, cmd := i.table.Update(msg)
- cmds = append(cmds, cmd)
- i.table = t
-
- cmds = append(cmds, func() tea.Msg {
- for _, log := range logging.List() {
- if log.ID == i.table.SelectedRow()[1] {
- // If the selected row matches the log ID, return the selected log message
- return selectedLogMsg(log)
- }
- }
- return nil
- })
- return i, tea.Batch(cmds...)
-}
-
-func (i *tableCmp) View() tea.View {
- t := styles.CurrentTheme()
- defaultStyles := table.DefaultStyles()
-
- // Header styling
- defaultStyles.Header = defaultStyles.Header.
- Foreground(t.Primary).
- Bold(true).
- BorderStyle(lipgloss.NormalBorder()).
- BorderBottom(true).
- BorderForeground(t.Border)
-
- // Selected row styling
- defaultStyles.Selected = defaultStyles.Selected.
- Foreground(t.FgSelected).
- Background(t.Primary).
- Bold(false)
-
- // Cell styling
- defaultStyles.Cell = defaultStyles.Cell.
- Foreground(t.FgBase)
-
- i.table.SetStyles(defaultStyles)
- return tea.NewView(i.table.View())
-}
-
-func (i *tableCmp) GetSize() (int, int) {
- return i.table.Width(), i.table.Height()
-}
-
-func (i *tableCmp) SetSize(width int, height int) tea.Cmd {
- i.table.SetWidth(width)
- i.table.SetHeight(height)
-
- columnWidth := (width - 10) / 4
- i.table.SetColumns([]table.Column{
- {
- Title: "Level",
- Width: 10,
- },
- {
- Title: "ID",
- Width: columnWidth,
- },
- {
- Title: "Time",
- Width: columnWidth,
- },
- {
- Title: "Message",
- Width: columnWidth,
- },
- {
- Title: "Attributes",
- Width: columnWidth,
- },
- })
- return nil
-}
-
-func (i *tableCmp) setRows() {
- rows := []table.Row{}
-
- slices.SortFunc(i.logs, func(a, b logging.LogMessage) int {
- if a.Time.Before(b.Time) {
- return -1
- }
- if a.Time.After(b.Time) {
- return 1
- }
- return 0
- })
-
- for _, log := range i.logs {
- rows = append(rows, logToRow(log))
- }
- i.table.SetRows(rows)
-}
-
-func logToRow(log logging.LogMessage) table.Row {
- // Format attributes as JSON string
- var attrStr string
- if len(log.Attributes) > 0 {
- var parts []string
- for _, attr := range log.Attributes {
- parts = append(parts, fmt.Sprintf(`{"Key":"%s","Value":"%s"}`, attr.Key, attr.Value))
- }
- attrStr = "[" + strings.Join(parts, ",") + "]"
- }
-
- // Format time with relative time
- timeStr := log.Time.Format("2006-01-05 15:04:05 UTC")
- relativeTime := getRelativeTime(log.Time)
- fullTimeStr := timeStr + " " + relativeTime
-
- return table.Row{
- strings.ToUpper(log.Level),
- log.ID,
- fullTimeStr,
- log.Message,
- attrStr,
- }
-}
-
-func NewLogsTable() TableComponent {
- columns := []table.Column{
- {Title: "Level"},
- {Title: "ID"},
- {Title: "Time"},
- {Title: "Message"},
- {Title: "Attributes"},
- }
-
- tableModel := table.New(
- table.WithColumns(columns),
- )
- tableModel.Focus()
- return &tableCmp{
- table: tableModel,
- }
-}
@@ -7,7 +7,7 @@ import (
"testing"
"github.com/alecthomas/chroma/v2/styles"
- "github.com/charmbracelet/crush/internal/exp/diffview"
+ "github.com/charmbracelet/crush/internal/tui/exp/diffview"
"github.com/charmbracelet/x/ansi"
"github.com/charmbracelet/x/exp/golden"
)
@@ -0,0 +1,89 @@
+package list
+
+import (
+ tea "github.com/charmbracelet/bubbletea/v2"
+ "github.com/charmbracelet/crush/internal/tui/components/core/layout"
+ "github.com/charmbracelet/crush/internal/tui/util"
+)
+
+type Item interface {
+ util.Model
+ layout.Sizeable
+}
+
+type List interface {
+ util.Model
+}
+
+type list struct {
+ width, height int
+ gap int
+
+ items []Item
+
+ renderedView string
+
+ // Filter options
+ filterable bool
+ filterPlaceholder string
+}
+
+type listOption func(*list)
+
+// WithFilterable enables filtering on the list.
+func WithFilterable(placeholder string) listOption {
+ return func(l *list) {
+ l.filterable = true
+ l.filterPlaceholder = placeholder
+ }
+}
+
+// WithItems sets the initial items for the list.
+func WithItems(items ...Item) listOption {
+ return func(l *list) {
+ l.items = items
+ }
+}
+
+// WithSize sets the size of the list.
+func WithSize(width, height int) listOption {
+ return func(l *list) {
+ l.width = width
+ l.height = height
+ }
+}
+
+// WithGap sets the gap between items in the list.
+func WithGap(gap int) listOption {
+ return func(l *list) {
+ l.gap = gap
+ }
+}
+
+func New(opts ...listOption) List {
+ list := &list{
+ items: make([]Item, 0),
+ }
+ for _, opt := range opts {
+ opt(list)
+ }
+ return list
+}
+
+// Init implements List.
+func (l *list) Init() tea.Cmd {
+ if l.height <= 0 || l.width <= 0 {
+ return nil
+ }
+ return nil
+}
+
+// Update implements List.
+func (l *list) Update(tea.Msg) (tea.Model, tea.Cmd) {
+ panic("unimplemented")
+}
+
+// View implements List.
+func (l *list) View() string {
+ panic("unimplemented")
+}
@@ -5,7 +5,6 @@ import (
)
type KeyMap struct {
- Logs key.Binding
Quit key.Binding
Help key.Binding
Commands key.Binding
@@ -16,10 +15,6 @@ type KeyMap struct {
func DefaultKeyMap() KeyMap {
return KeyMap{
- Logs: key.NewBinding(
- key.WithKeys("ctrl+l"),
- key.WithHelp("ctrl+l", "logs"),
- ),
Quit: key.NewBinding(
key.WithKeys("ctrl+c"),
key.WithHelp("ctrl+c", "quit"),
@@ -47,7 +42,6 @@ func (k KeyMap) FullHelp() [][]key.Binding {
k.Sessions,
k.Quit,
k.Help,
- k.Logs,
}
slice = k.prependEscAndTab(slice)
slice = append(slice, k.pageBindings...)
@@ -170,11 +170,12 @@ func (p *chatPage) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
util.CmdHandler(ChatFocusedMsg{Focused: false}),
)
case key.Matches(msg, p.keyMap.AddAttachment):
- model := config.GetAgentModel(config.AgentCoder)
+ agentCfg := config.Get().Agents["coder"]
+ model := config.Get().GetModelByType(agentCfg.Model)
if model.SupportsImages {
return p, util.CmdHandler(OpenFilePickerMsg{})
} else {
- return p, util.ReportWarn("File attachments are not supported by the current model: " + model.Name)
+ return p, util.ReportWarn("File attachments are not supported by the current model: " + model.Model)
}
case key.Matches(msg, p.keyMap.Tab):
if p.session.ID == "" {
@@ -302,7 +303,7 @@ func (p *chatPage) GetSize() (int, int) {
return p.layout.GetSize()
}
-func (p *chatPage) View() tea.View {
+func (p *chatPage) View() string {
if !p.compactMode || p.session.ID == "" {
// If not in compact mode or there is no session, we don't show the header
return p.layout.View()
@@ -310,8 +311,8 @@ func (p *chatPage) View() tea.View {
layoutView := p.layout.View()
chatView := strings.Join(
[]string{
- p.header.View().String(),
- layoutView.String(),
+ p.header.View(),
+ layoutView,
}, "\n",
)
layers := []*lipgloss.Layer{
@@ -326,7 +327,7 @@ func (p *chatPage) View() tea.View {
details := style.Render(
lipgloss.JoinVertical(
lipgloss.Left,
- p.compactSidebar.View().String(),
+ p.compactSidebar.View(),
version,
),
)
@@ -335,9 +336,14 @@ func (p *chatPage) View() tea.View {
canvas := lipgloss.NewCanvas(
layers...,
)
- view := tea.NewView(canvas.Render())
- view.SetCursor(layoutView.Cursor())
- return view
+ return canvas.Render()
+}
+
+func (p *chatPage) Cursor() *tea.Cursor {
+ if v, ok := p.layout.(util.Cursor); ok {
+ return v.Cursor()
+ }
+ return nil
}
func (p *chatPage) Bindings() []key.Binding {
@@ -1,43 +0,0 @@
-package logs
-
-import (
- "github.com/charmbracelet/bubbles/v2/key"
-)
-
-type KeyMap struct {
- Back key.Binding
-}
-
-func DefaultKeyMap() KeyMap {
- return KeyMap{
- Back: key.NewBinding(
- key.WithKeys("esc", "backspace"),
- key.WithHelp("esc/backspace", "back to chat"),
- ),
- }
-}
-
-// KeyBindings implements layout.KeyMapProvider
-func (k KeyMap) KeyBindings() []key.Binding {
- return []key.Binding{
- k.Back,
- }
-}
-
-// FullHelp implements help.KeyMap.
-func (k KeyMap) FullHelp() [][]key.Binding {
- m := [][]key.Binding{}
- slice := k.KeyBindings()
- for i := 0; i < len(slice); i += 4 {
- end := min(i+4, len(slice))
- m = append(m, slice[i:end])
- }
- return m
-}
-
-// ShortHelp implements help.KeyMap.
-func (k KeyMap) ShortHelp() []key.Binding {
- return []key.Binding{
- k.Back,
- }
-}
@@ -1,100 +0,0 @@
-package logs
-
-import (
- "github.com/charmbracelet/bubbles/v2/key"
- tea "github.com/charmbracelet/bubbletea/v2"
- "github.com/charmbracelet/crush/internal/tui/components/core"
- "github.com/charmbracelet/crush/internal/tui/components/core/layout"
- logsComponents "github.com/charmbracelet/crush/internal/tui/components/logs"
- "github.com/charmbracelet/crush/internal/tui/page"
- "github.com/charmbracelet/crush/internal/tui/page/chat"
- "github.com/charmbracelet/crush/internal/tui/styles"
- "github.com/charmbracelet/crush/internal/tui/util"
- "github.com/charmbracelet/lipgloss/v2"
-)
-
-var LogsPage page.PageID = "logs"
-
-type LogPage interface {
- util.Model
- layout.Sizeable
-}
-
-type logsPage struct {
- width, height int
- table logsComponents.TableComponent
- details logsComponents.DetailComponent
- keyMap KeyMap
-}
-
-func (p *logsPage) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
- var cmds []tea.Cmd
- switch msg := msg.(type) {
- case tea.WindowSizeMsg:
- p.width = msg.Width
- p.height = msg.Height
- return p, p.SetSize(msg.Width, msg.Height)
- case tea.KeyMsg:
- switch {
- case key.Matches(msg, p.keyMap.Back):
- return p, util.CmdHandler(page.PageChangeMsg{ID: chat.ChatPageID})
- }
- }
-
- table, cmd := p.table.Update(msg)
- cmds = append(cmds, cmd)
- p.table = table.(logsComponents.TableComponent)
- details, cmd := p.details.Update(msg)
- cmds = append(cmds, cmd)
- p.details = details.(logsComponents.DetailComponent)
-
- return p, tea.Batch(cmds...)
-}
-
-func (p *logsPage) View() tea.View {
- baseStyle := styles.CurrentTheme().S().Base
- style := baseStyle.Width(p.width).Height(p.height).Padding(1)
- title := core.Title("Logs", p.width-2)
-
- return tea.NewView(
- style.Render(
- lipgloss.JoinVertical(lipgloss.Top,
- title,
- p.details.View().String(),
- p.table.View().String(),
- ),
- ),
- )
-}
-
-// GetSize implements LogPage.
-func (p *logsPage) GetSize() (int, int) {
- return p.width, p.height
-}
-
-// SetSize implements LogPage.
-func (p *logsPage) SetSize(width int, height int) tea.Cmd {
- p.width = width
- p.height = height
- availableHeight := height - 2 // Padding for top and bottom
- availableHeight -= 1 // title height
- return tea.Batch(
- p.table.SetSize(width-2, availableHeight/2),
- p.details.SetSize(width-2, availableHeight/2),
- )
-}
-
-func (p *logsPage) Init() tea.Cmd {
- return tea.Batch(
- p.table.Init(),
- p.details.Init(),
- )
-}
-
-func NewLogsPage() LogPage {
- return &logsPage{
- details: logsComponents.NewLogsDetails(),
- table: logsComponents.NewLogsTable(),
- keyMap: DefaultKeyMap(),
- }
-}
@@ -10,7 +10,7 @@ import (
"github.com/charmbracelet/bubbles/v2/textarea"
"github.com/charmbracelet/bubbles/v2/textinput"
tea "github.com/charmbracelet/bubbletea/v2"
- "github.com/charmbracelet/crush/internal/exp/diffview"
+ "github.com/charmbracelet/crush/internal/tui/exp/diffview"
"github.com/charmbracelet/glamour/v2/ansi"
"github.com/charmbracelet/lipgloss/v2"
"github.com/charmbracelet/x/exp/charmtone"
@@ -9,7 +9,6 @@ import (
"github.com/charmbracelet/crush/internal/app"
"github.com/charmbracelet/crush/internal/config"
"github.com/charmbracelet/crush/internal/llm/agent"
- "github.com/charmbracelet/crush/internal/logging"
"github.com/charmbracelet/crush/internal/permission"
"github.com/charmbracelet/crush/internal/pubsub"
cmpChat "github.com/charmbracelet/crush/internal/tui/components/chat"
@@ -27,7 +26,6 @@ import (
"github.com/charmbracelet/crush/internal/tui/components/dialogs/sessions"
"github.com/charmbracelet/crush/internal/tui/page"
"github.com/charmbracelet/crush/internal/tui/page/chat"
- "github.com/charmbracelet/crush/internal/tui/page/logs"
"github.com/charmbracelet/crush/internal/tui/styles"
"github.com/charmbracelet/crush/internal/tui/util"
"github.com/charmbracelet/lipgloss/v2"
@@ -84,6 +82,9 @@ func (a appModel) Init() tea.Cmd {
return nil
})
+ // Enable mouse support.
+ cmds = append(cmds, tea.EnableMouseAllMotion)
+
return tea.Batch(cmds...)
}
@@ -135,20 +136,6 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
a.selectedSessionID = msg.ID
case cmpChat.SessionClearedMsg:
a.selectedSessionID = ""
- // Logs
- case pubsub.Event[logging.LogMessage]:
- // Send to the status component
- s, statusCmd := a.status.Update(msg)
- a.status = s.(status.StatusCmp)
- cmds = append(cmds, statusCmd)
-
- // If the current page is logs, update the logs view
- if a.currentPage == logs.LogsPage {
- updated, pageCmd := a.pages[a.currentPage].Update(msg)
- a.pages[a.currentPage] = updated.(util.Model)
- cmds = append(cmds, pageCmd)
- }
- return a, tea.Batch(cmds...)
// Commands
case commands.SwitchSessionsMsg:
return a, func() tea.Msg {
@@ -176,15 +163,14 @@ func (a *appModel) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
// Update the agent with the new model/provider configuration
if err := a.app.UpdateAgentModel(); err != nil {
- logging.ErrorPersist(fmt.Sprintf("Failed to update agent model: %v", err))
- return a, util.ReportError(fmt.Errorf("model changed to %s but failed to update agent: %v", msg.Model.ModelID, err))
+ return a, util.ReportError(fmt.Errorf("model changed to %s but failed to update agent: %v", msg.Model.Model, err))
}
modelTypeName := "large"
- if msg.ModelType == config.SmallModel {
+ if msg.ModelType == config.SelectedModelTypeSmall {
modelTypeName = "small"
}
- return a, util.ReportInfo(fmt.Sprintf("%s model changed to %s", modelTypeName, msg.Model.ModelID))
+ return a, util.ReportInfo(fmt.Sprintf("%s model changed to %s", modelTypeName, msg.Model.Model))
// File Picker
case chat.OpenFilePickerMsg:
@@ -348,10 +334,6 @@ func (a *appModel) handleKeyPressMsg(msg tea.KeyPressMsg) tea.Cmd {
},
)
return tea.Sequence(cmds...)
- // Page navigation
- case key.Matches(msg, a.keyMap.Logs):
- return a.moveToPage(logs.LogsPage)
-
default:
if a.dialog.HasDialogs() {
u, dialogCmd := a.dialog.Update(msg)
@@ -397,9 +379,9 @@ func (a *appModel) View() tea.View {
a.status.SetKeyMap(a.keyMap)
pageView := page.View()
components := []string{
- pageView.String(),
+ pageView,
}
- components = append(components, a.status.View().String())
+ components = append(components, a.status.View())
appView := lipgloss.JoinVertical(lipgloss.Top, components...)
layers := []*lipgloss.Layer{
@@ -412,14 +394,20 @@ func (a *appModel) View() tea.View {
)
}
- cursor := pageView.Cursor()
- activeView := a.dialog.ActiveView()
+ var cursor *tea.Cursor
+ if v, ok := page.(util.Cursor); ok {
+ cursor = v.Cursor()
+ }
+ activeView := a.dialog.ActiveModel()
if activeView != nil {
- cursor = activeView.Cursor()
+ cursor = nil // Reset cursor if a dialog is active unless it implements util.Cursor
+ if v, ok := activeView.(util.Cursor); ok {
+ cursor = v.Cursor()
+ }
}
if a.completions.Open() && cursor != nil {
- cmp := a.completions.View().String()
+ cmp := a.completions.View()
x, y := a.completions.Position()
layers = append(
layers,
@@ -431,10 +419,11 @@ func (a *appModel) View() tea.View {
layers...,
)
+ var view tea.View
t := styles.CurrentTheme()
- view := tea.NewView(canvas.Render())
- view.SetBackgroundColor(t.BgBase)
- view.SetCursor(cursor)
+ view.Layer = canvas
+ view.BackgroundColor = t.BgBase
+ view.Cursor = cursor
return view
}
@@ -453,7 +442,6 @@ func New(app *app.App) tea.Model {
pages: map[page.PageID]util.Model{
chat.ChatPageID: chatPage,
- logs.LogsPage: logs.NewLogsPage(),
},
dialog: dialogs.NewDialogCmp(),
@@ -6,9 +6,13 @@ import (
tea "github.com/charmbracelet/bubbletea/v2"
)
+type Cursor interface {
+ Cursor() *tea.Cursor
+}
+
type Model interface {
tea.Model
- tea.Viewable
+ tea.ViewModel
}
func CmdHandler(msg tea.Msg) tea.Cmd {
@@ -1,25 +1,29 @@
package main
import (
+ "fmt"
+ "log/slog"
"net/http"
"os"
_ "net/http/pprof" // profiling
+ _ "github.com/joho/godotenv/autoload" // automatically load .env files
+
"github.com/charmbracelet/crush/cmd"
- "github.com/charmbracelet/crush/internal/logging"
+ "github.com/charmbracelet/crush/internal/log"
)
func main() {
- defer logging.RecoverPanic("main", func() {
- logging.ErrorPersist("Application terminated due to unhandled panic")
+ defer log.RecoverPanic("main", func() {
+ slog.Error("Application terminated due to unhandled panic")
})
if os.Getenv("CRUSH_PROFILE") != "" {
go func() {
- logging.Info("Serving pprof at localhost:6060")
+ slog.Info("Serving pprof at localhost:6060")
if httpErr := http.ListenAndServe("localhost:6060", nil); httpErr != nil {
- logging.Error("Failed to pprof listen: %v", httpErr)
+ slog.Error(fmt.Sprintf("Failed to pprof listen: %v", httpErr))
}
}()
}
@@ -1,51 +0,0 @@
-## TODOs before release
-
-- [x] Implement help
- - [x] Show full help
- - [x] Make help dependent on the focused pane and page
-- [x] Implement current model in the sidebar
-- [x] Implement LSP errors
-- [x] Implement changed files
- - [x] Implement initial load
- - [x] Implement realtime file changes
-- [ ] Events when tool error
-- [ ] Support bash commands
-- [ ] Editor attachments fixes
- - [ ] Reimplement removing attachments
-- [ ] Fix the logs view
- - [ ] Review the implementation
- - [ ] The page lags
- - [ ] Make the logs long lived ?
-- [ ] Add all possible actions to the commands
-- [ ] Parallel tool calls and permissions
- - [ ] Run the tools in parallel and add results in parallel
- - [ ] Show multiple permissions dialogs
-- [ ] Add another space around buttons
-- [ ] Completions
- - [ ] Should change the help to show the completions stuff
- - [ ] Should make it wider
- - [ ] Tab and ctrl+y should accept
- - [ ] Words should line up
-  - [ ] If there are no completions and click tab/ctrl+y/enter it should close it
-- [ ] Investigate messages issues
- - [ ] Make the agent separator look like the
- - [ ] Cleanup tool calls (watch all states)
- - [ ] Weird behavior sometimes the message does not update
- - [ ] Message length (I saw the message go beyond the correct length when there are errors)
- - [ ] Address UX issues
- - [ ] Fix issue with numbers (padding) view tool
-- [x] Implement responsive mode
-- [ ] Update interactive mode to use the spinner
-- [ ] Revisit the core list component
- - [ ] This component has become super complex we might need to fix this.
-- [ ] Handle correct LSP and MCP status icon
-- [x] Investigate ways to make the spinner less CPU intensive
-- [ ] General cleanup and documentation
-- [ ] Update the readme
-
-## Maybe
-
-- [ ] Revisit the provider/model/configs
-- [ ] Implement correct persistent shell
-- [ ] Store file read/write time somewhere so that we can make sure that even if we restart we do not need to re-read the same file
-- [ ] Send updates to the UI when new LSP diagnostics are available
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
@@ -0,0 +1,368 @@
+# Changelog
+
+## [0.13.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.12.1...auth/v0.13.0) (2024-12-13)
+
+
+### Features
+
+* **auth:** Add logging support ([#11079](https://github.com/googleapis/google-cloud-go/issues/11079)) ([c80e31d](https://github.com/googleapis/google-cloud-go/commit/c80e31df5ecb33a810be3dfb9d9e27ac531aa91d))
+* **auth:** Pass logger from auth layer to metadata package ([#11288](https://github.com/googleapis/google-cloud-go/issues/11288)) ([b552efd](https://github.com/googleapis/google-cloud-go/commit/b552efd6ab34e5dfded18438e0fbfd925805614f))
+
+
+### Bug Fixes
+
+* **auth:** Check compute cred type before non-default flag for DP ([#11255](https://github.com/googleapis/google-cloud-go/issues/11255)) ([4347ca1](https://github.com/googleapis/google-cloud-go/commit/4347ca141892be8ae813399b4b437662a103bc90))
+
+## [0.12.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.12.0...auth/v0.12.1) (2024-12-10)
+
+
+### Bug Fixes
+
+* **auth:** Correct typo in link ([#11160](https://github.com/googleapis/google-cloud-go/issues/11160)) ([af6fb46](https://github.com/googleapis/google-cloud-go/commit/af6fb46d7cd694ddbe8c9d63bc4cdcd62b9fb2c1))
+
+## [0.12.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.11.0...auth/v0.12.0) (2024-12-04)
+
+
+### Features
+
+* **auth:** Add support for providing custom certificate URL ([#11006](https://github.com/googleapis/google-cloud-go/issues/11006)) ([ebf3657](https://github.com/googleapis/google-cloud-go/commit/ebf36579724afb375d3974cf1da38f703e3b7dbc)), refs [#11005](https://github.com/googleapis/google-cloud-go/issues/11005)
+
+
+### Bug Fixes
+
+* **auth:** Ensure endpoints are present in Validator ([#11209](https://github.com/googleapis/google-cloud-go/issues/11209)) ([106cd53](https://github.com/googleapis/google-cloud-go/commit/106cd53309facaef1b8ea78376179f523f6912b9)), refs [#11006](https://github.com/googleapis/google-cloud-go/issues/11006) [#11190](https://github.com/googleapis/google-cloud-go/issues/11190) [#11189](https://github.com/googleapis/google-cloud-go/issues/11189) [#11188](https://github.com/googleapis/google-cloud-go/issues/11188)
+
+## [0.11.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.10.2...auth/v0.11.0) (2024-11-21)
+
+
+### Features
+
+* **auth:** Add universe domain support to mTLS ([#11159](https://github.com/googleapis/google-cloud-go/issues/11159)) ([117748b](https://github.com/googleapis/google-cloud-go/commit/117748ba1cfd4ae62a6a4feb7e30951cb2bc9344))
+
+## [0.10.2](https://github.com/googleapis/google-cloud-go/compare/auth/v0.10.1...auth/v0.10.2) (2024-11-12)
+
+
+### Bug Fixes
+
+* **auth:** Restore use of grpc.Dial ([#11118](https://github.com/googleapis/google-cloud-go/issues/11118)) ([2456b94](https://github.com/googleapis/google-cloud-go/commit/2456b943b7b8aaabd4d8bfb7572c0f477ae0db45)), refs [#7556](https://github.com/googleapis/google-cloud-go/issues/7556)
+
+## [0.10.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.10.0...auth/v0.10.1) (2024-11-06)
+
+
+### Bug Fixes
+
+* **auth:** Restore Application Default Credentials support to idtoken ([#11083](https://github.com/googleapis/google-cloud-go/issues/11083)) ([8771f2e](https://github.com/googleapis/google-cloud-go/commit/8771f2ea9807ab822083808e0678392edff3b4f2))
+* **auth:** Skip impersonate universe domain check if empty ([#11086](https://github.com/googleapis/google-cloud-go/issues/11086)) ([87159c1](https://github.com/googleapis/google-cloud-go/commit/87159c1059d4a18d1367ce62746a838a94964ab6))
+
+## [0.10.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.9...auth/v0.10.0) (2024-10-30)
+
+
+### Features
+
+* **auth:** Add universe domain support to credentials/impersonate ([#10953](https://github.com/googleapis/google-cloud-go/issues/10953)) ([e06cb64](https://github.com/googleapis/google-cloud-go/commit/e06cb6499f7eda3aef08ab18ff197016f667684b))
+
+## [0.9.9](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.8...auth/v0.9.9) (2024-10-22)
+
+
+### Bug Fixes
+
+* **auth:** Fallback cert lookups for missing files ([#11013](https://github.com/googleapis/google-cloud-go/issues/11013)) ([bd76695](https://github.com/googleapis/google-cloud-go/commit/bd766957ec238b7c40ddbabb369e612dc9b07313)), refs [#10844](https://github.com/googleapis/google-cloud-go/issues/10844)
+* **auth:** Replace MDS endpoint universe_domain with universe-domain ([#11000](https://github.com/googleapis/google-cloud-go/issues/11000)) ([6a1586f](https://github.com/googleapis/google-cloud-go/commit/6a1586f2ce9974684affaea84e7b629313b4d114))
+
+## [0.9.8](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.7...auth/v0.9.8) (2024-10-09)
+
+
+### Bug Fixes
+
+* **auth:** Restore OpenTelemetry handling in transports ([#10968](https://github.com/googleapis/google-cloud-go/issues/10968)) ([08c6d04](https://github.com/googleapis/google-cloud-go/commit/08c6d04901c1a20e219b2d86df41dbaa6d7d7b55)), refs [#10962](https://github.com/googleapis/google-cloud-go/issues/10962)
+* **auth:** Try to talk to plaintext S2A if credentials cannot be found for mTLS-S2A ([#10941](https://github.com/googleapis/google-cloud-go/issues/10941)) ([0f0bf2d](https://github.com/googleapis/google-cloud-go/commit/0f0bf2d18c97dd8b65bcf0099f0802b5631c6287))
+
+## [0.9.7](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.6...auth/v0.9.7) (2024-10-01)
+
+
+### Bug Fixes
+
+* **auth:** Restore support for non-default service accounts for DirectPath ([#10937](https://github.com/googleapis/google-cloud-go/issues/10937)) ([a38650e](https://github.com/googleapis/google-cloud-go/commit/a38650edbf420223077498cafa537aec74b37aad)), refs [#10907](https://github.com/googleapis/google-cloud-go/issues/10907)
+
+## [0.9.6](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.5...auth/v0.9.6) (2024-09-30)
+
+
+### Bug Fixes
+
+* **auth:** Make aws credentials provider retrieve fresh credentials ([#10920](https://github.com/googleapis/google-cloud-go/issues/10920)) ([250fbf8](https://github.com/googleapis/google-cloud-go/commit/250fbf87d858d865e399a241b7e537c4ff0c3dd8))
+
+## [0.9.5](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.4...auth/v0.9.5) (2024-09-25)
+
+
+### Bug Fixes
+
+* **auth:** Restore support for GOOGLE_CLOUD_UNIVERSE_DOMAIN env ([#10915](https://github.com/googleapis/google-cloud-go/issues/10915)) ([94caaaa](https://github.com/googleapis/google-cloud-go/commit/94caaaa061362d0e00ef6214afcc8a0a3e7ebfb2))
+* **auth:** Skip directpath credentials overwrite when it's not on GCE ([#10833](https://github.com/googleapis/google-cloud-go/issues/10833)) ([7e5e8d1](https://github.com/googleapis/google-cloud-go/commit/7e5e8d10b761b0a6e43e19a028528db361bc07b1))
+* **auth:** Use new context for non-blocking token refresh ([#10919](https://github.com/googleapis/google-cloud-go/issues/10919)) ([cf7102d](https://github.com/googleapis/google-cloud-go/commit/cf7102d33a21be1e5a9d47a49456b3a57c43b350))
+
+## [0.9.4](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.3...auth/v0.9.4) (2024-09-11)
+
+
+### Bug Fixes
+
+* **auth:** Enable self-signed JWT for non-GDU universe domain ([#10831](https://github.com/googleapis/google-cloud-go/issues/10831)) ([f9869f7](https://github.com/googleapis/google-cloud-go/commit/f9869f7903cfd34d1b97c25d0dc5669d2c5138e6))
+
+## [0.9.3](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.2...auth/v0.9.3) (2024-09-03)
+
+
+### Bug Fixes
+
+* **auth:** Choose quota project envvar over file when both present ([#10807](https://github.com/googleapis/google-cloud-go/issues/10807)) ([2d8dd77](https://github.com/googleapis/google-cloud-go/commit/2d8dd7700eff92d4b95027be55e26e1e7aa79181)), refs [#10804](https://github.com/googleapis/google-cloud-go/issues/10804)
+
+## [0.9.2](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.1...auth/v0.9.2) (2024-08-30)
+
+
+### Bug Fixes
+
+* **auth:** Handle non-Transport DefaultTransport ([#10733](https://github.com/googleapis/google-cloud-go/issues/10733)) ([98d91dc](https://github.com/googleapis/google-cloud-go/commit/98d91dc8316b247498fab41ab35e57a0446fe556)), refs [#10742](https://github.com/googleapis/google-cloud-go/issues/10742)
+* **auth:** Make sure quota option takes precedence over env/file ([#10797](https://github.com/googleapis/google-cloud-go/issues/10797)) ([f1b050d](https://github.com/googleapis/google-cloud-go/commit/f1b050d56d804b245cab048c2980d32b0eaceb4e)), refs [#10795](https://github.com/googleapis/google-cloud-go/issues/10795)
+
+
+### Documentation
+
+* **auth:** Fix Go doc comment link ([#10751](https://github.com/googleapis/google-cloud-go/issues/10751)) ([015acfa](https://github.com/googleapis/google-cloud-go/commit/015acfab4d172650928bb1119bc2cd6307b9a437))
+
+## [0.9.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.0...auth/v0.9.1) (2024-08-22)
+
+
+### Bug Fixes
+
+* **auth:** Setting expireEarly to default when the value is 0 ([#10732](https://github.com/googleapis/google-cloud-go/issues/10732)) ([5e67869](https://github.com/googleapis/google-cloud-go/commit/5e67869a31e9e8ecb4eeebd2cfa11a761c3b1948))
+
+## [0.9.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.8.1...auth/v0.9.0) (2024-08-16)
+
+
+### Features
+
+* **auth:** Auth library can talk to S2A over mTLS ([#10634](https://github.com/googleapis/google-cloud-go/issues/10634)) ([5250a13](https://github.com/googleapis/google-cloud-go/commit/5250a13ec95b8d4eefbe0158f82857ff2189cb45))
+
+## [0.8.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.8.0...auth/v0.8.1) (2024-08-13)
+
+
+### Bug Fixes
+
+* **auth:** Make default client creation more lenient ([#10669](https://github.com/googleapis/google-cloud-go/issues/10669)) ([1afb9ee](https://github.com/googleapis/google-cloud-go/commit/1afb9ee1ee9de9810722800018133304a0ca34d1)), refs [#10638](https://github.com/googleapis/google-cloud-go/issues/10638)
+
+## [0.8.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.7.3...auth/v0.8.0) (2024-08-07)
+
+
+### Features
+
+* **auth:** Adds support for X509 workload identity federation ([#10373](https://github.com/googleapis/google-cloud-go/issues/10373)) ([5d07505](https://github.com/googleapis/google-cloud-go/commit/5d075056cbe27bb1da4072a26070c41f8999eb9b))
+
+## [0.7.3](https://github.com/googleapis/google-cloud-go/compare/auth/v0.7.2...auth/v0.7.3) (2024-08-01)
+
+
+### Bug Fixes
+
+* **auth/oauth2adapt:** Update dependencies ([257c40b](https://github.com/googleapis/google-cloud-go/commit/257c40bd6d7e59730017cf32bda8823d7a232758))
+* **auth:** Disable automatic universe domain check for MDS ([#10620](https://github.com/googleapis/google-cloud-go/issues/10620)) ([7cea5ed](https://github.com/googleapis/google-cloud-go/commit/7cea5edd5a0c1e6bca558696f5607879141910e8))
+* **auth:** Update dependencies ([257c40b](https://github.com/googleapis/google-cloud-go/commit/257c40bd6d7e59730017cf32bda8823d7a232758))
+
+## [0.7.2](https://github.com/googleapis/google-cloud-go/compare/auth/v0.7.1...auth/v0.7.2) (2024-07-22)
+
+
+### Bug Fixes
+
+* **auth:** Use default client for universe metadata lookup ([#10551](https://github.com/googleapis/google-cloud-go/issues/10551)) ([d9046fd](https://github.com/googleapis/google-cloud-go/commit/d9046fdd1435d1ce48f374806c1def4cb5ac6cd3)), refs [#10544](https://github.com/googleapis/google-cloud-go/issues/10544)
+
+## [0.7.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.7.0...auth/v0.7.1) (2024-07-10)
+
+
+### Bug Fixes
+
+* **auth:** Bump google.golang.org/grpc@v1.64.1 ([8ecc4e9](https://github.com/googleapis/google-cloud-go/commit/8ecc4e9622e5bbe9b90384d5848ab816027226c5))
+
+## [0.7.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.6.1...auth/v0.7.0) (2024-07-09)
+
+
+### Features
+
+* **auth:** Add workload X509 cert provider as a default cert provider ([#10479](https://github.com/googleapis/google-cloud-go/issues/10479)) ([c51ee6c](https://github.com/googleapis/google-cloud-go/commit/c51ee6cf65ce05b4d501083e49d468c75ac1ea63))
+
+
+### Bug Fixes
+
+* **auth/oauth2adapt:** Bump google.golang.org/api@v0.187.0 ([8fa9e39](https://github.com/googleapis/google-cloud-go/commit/8fa9e398e512fd8533fd49060371e61b5725a85b))
+* **auth:** Bump google.golang.org/api@v0.187.0 ([8fa9e39](https://github.com/googleapis/google-cloud-go/commit/8fa9e398e512fd8533fd49060371e61b5725a85b))
+* **auth:** Check len of slices, not non-nil ([#10483](https://github.com/googleapis/google-cloud-go/issues/10483)) ([0a966a1](https://github.com/googleapis/google-cloud-go/commit/0a966a183e5f0e811977216d736d875b7233e942))
+
+## [0.6.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.6.0...auth/v0.6.1) (2024-07-01)
+
+
+### Bug Fixes
+
+* **auth:** Support gRPC API keys ([#10460](https://github.com/googleapis/google-cloud-go/issues/10460)) ([daa6646](https://github.com/googleapis/google-cloud-go/commit/daa6646d2af5d7fb5b30489f4934c7db89868c7c))
+* **auth:** Update http and grpc transports to support token exchange over mTLS ([#10397](https://github.com/googleapis/google-cloud-go/issues/10397)) ([c6dfdcf](https://github.com/googleapis/google-cloud-go/commit/c6dfdcf893c3f971eba15026c12db0a960ae81f2))
+
+## [0.6.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.5.2...auth/v0.6.0) (2024-06-25)
+
+
+### Features
+
+* **auth:** Add non-blocking token refresh for compute MDS ([#10263](https://github.com/googleapis/google-cloud-go/issues/10263)) ([9ac350d](https://github.com/googleapis/google-cloud-go/commit/9ac350da11a49b8e2174d3fc5b1a5070fec78b4e))
+
+
+### Bug Fixes
+
+* **auth:** Return error if envvar detected file returns an error ([#10431](https://github.com/googleapis/google-cloud-go/issues/10431)) ([e52b9a7](https://github.com/googleapis/google-cloud-go/commit/e52b9a7c45468827f5d220ab00965191faeb9d05))
+
+## [0.5.2](https://github.com/googleapis/google-cloud-go/compare/auth/v0.5.1...auth/v0.5.2) (2024-06-24)
+
+
+### Bug Fixes
+
+* **auth:** Fetch initial token when CachedTokenProviderOptions.DisableAutoRefresh is true ([#10415](https://github.com/googleapis/google-cloud-go/issues/10415)) ([3266763](https://github.com/googleapis/google-cloud-go/commit/32667635ca2efad05cd8c087c004ca07d7406913)), refs [#10414](https://github.com/googleapis/google-cloud-go/issues/10414)
+
+## [0.5.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.5.0...auth/v0.5.1) (2024-05-31)
+
+
+### Bug Fixes
+
+* **auth:** Pass through client to 2LO and 3LO flows ([#10290](https://github.com/googleapis/google-cloud-go/issues/10290)) ([685784e](https://github.com/googleapis/google-cloud-go/commit/685784ea84358c15e9214bdecb307d37aa3b6d2f))
+
+## [0.5.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.4.2...auth/v0.5.0) (2024-05-28)
+
+
+### Features
+
+* **auth:** Adds X509 workload certificate provider ([#10233](https://github.com/googleapis/google-cloud-go/issues/10233)) ([17a9db7](https://github.com/googleapis/google-cloud-go/commit/17a9db73af35e3d1a7a25ac4fd1377a103de6150))
+
+## [0.4.2](https://github.com/googleapis/google-cloud-go/compare/auth/v0.4.1...auth/v0.4.2) (2024-05-16)
+
+
+### Bug Fixes
+
+* **auth:** Enable client certificates by default only for GDU ([#10151](https://github.com/googleapis/google-cloud-go/issues/10151)) ([7c52978](https://github.com/googleapis/google-cloud-go/commit/7c529786275a39b7e00525f7d5e7be0d963e9e15))
+* **auth:** Handle non-Transport DefaultTransport ([#10162](https://github.com/googleapis/google-cloud-go/issues/10162)) ([fa3bfdb](https://github.com/googleapis/google-cloud-go/commit/fa3bfdb23aaa45b34394a8b61e753b3587506782)), refs [#10159](https://github.com/googleapis/google-cloud-go/issues/10159)
+* **auth:** Have refresh time match docs ([#10147](https://github.com/googleapis/google-cloud-go/issues/10147)) ([bcb5568](https://github.com/googleapis/google-cloud-go/commit/bcb5568c07a54dd3d2e869d15f502b0741a609e8))
+* **auth:** Update compute token fetching error with named prefix ([#10180](https://github.com/googleapis/google-cloud-go/issues/10180)) ([4573504](https://github.com/googleapis/google-cloud-go/commit/4573504828d2928bebedc875d87650ba227829ea))
+
+## [0.4.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.4.0...auth/v0.4.1) (2024-05-09)
+
+
+### Bug Fixes
+
+* **auth:** Don't try to detect default creds if opts configured ([#10143](https://github.com/googleapis/google-cloud-go/issues/10143)) ([804632e](https://github.com/googleapis/google-cloud-go/commit/804632e7c5b0b85ff522f7951114485e256eb5bc))
+
+## [0.4.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.3.0...auth/v0.4.0) (2024-05-07)
+
+
+### Features
+
+* **auth:** Enable client certificates by default ([#10102](https://github.com/googleapis/google-cloud-go/issues/10102)) ([9013e52](https://github.com/googleapis/google-cloud-go/commit/9013e5200a6ec0f178ed91acb255481ffb073a2c))
+
+
+### Bug Fixes
+
+* **auth:** Get s2a logic up to date ([#10093](https://github.com/googleapis/google-cloud-go/issues/10093)) ([4fe9ae4](https://github.com/googleapis/google-cloud-go/commit/4fe9ae4b7101af2a5221d6d6b2e77b479305bb06))
+
+## [0.3.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.2.2...auth/v0.3.0) (2024-04-23)
+
+
+### Features
+
+* **auth/httptransport:** Add ability to customize transport ([#10023](https://github.com/googleapis/google-cloud-go/issues/10023)) ([72c7f6b](https://github.com/googleapis/google-cloud-go/commit/72c7f6bbec3136cc7a62788fc7186bc33ef6c3b3)), refs [#9812](https://github.com/googleapis/google-cloud-go/issues/9812) [#9814](https://github.com/googleapis/google-cloud-go/issues/9814)
+
+
+### Bug Fixes
+
+* **auth/credentials:** Error on bad file name if explicitly set ([#10018](https://github.com/googleapis/google-cloud-go/issues/10018)) ([55beaa9](https://github.com/googleapis/google-cloud-go/commit/55beaa993aaf052d8be39766afc6777c3c2a0bdd)), refs [#9809](https://github.com/googleapis/google-cloud-go/issues/9809)
+
+## [0.2.2](https://github.com/googleapis/google-cloud-go/compare/auth/v0.2.1...auth/v0.2.2) (2024-04-19)
+
+
+### Bug Fixes
+
+* **auth:** Add internal opt to skip validation on transports ([#9999](https://github.com/googleapis/google-cloud-go/issues/9999)) ([9e20ef8](https://github.com/googleapis/google-cloud-go/commit/9e20ef89f6287d6bd03b8697d5898dc43b4a77cf)), refs [#9823](https://github.com/googleapis/google-cloud-go/issues/9823)
+* **auth:** Set secure flag for gRPC conn pools ([#10002](https://github.com/googleapis/google-cloud-go/issues/10002)) ([14e3956](https://github.com/googleapis/google-cloud-go/commit/14e3956dfd736399731b5ee8d9b178ae085cf7ba)), refs [#9833](https://github.com/googleapis/google-cloud-go/issues/9833)
+
+## [0.2.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.2.0...auth/v0.2.1) (2024-04-18)
+
+
+### Bug Fixes
+
+* **auth:** Default gRPC token type to Bearer if not set ([#9800](https://github.com/googleapis/google-cloud-go/issues/9800)) ([5284066](https://github.com/googleapis/google-cloud-go/commit/5284066670b6fe65d79089cfe0199c9660f87fc7))
+
+## [0.2.0](https://github.com/googleapis/google-cloud-go/compare/auth/v0.1.1...auth/v0.2.0) (2024-04-15)
+
+### Breaking Changes
+
+In the below mentioned commits there were a few large breaking changes since the
+last release of the module.
+
+1. The `Credentials` type has been moved to the root of the module as it is
+ becoming the core abstraction for the whole module.
+2. Because of the above mentioned change many functions that previously
+ returned a `TokenProvider` now return `Credentials`. Similarly, these
+ functions have been renamed to be more specific.
+3. Most places that used to take an optional `TokenProvider` now accept
+ `Credentials`. You can make a `Credentials` from a `TokenProvider` using the
+ constructor found in the `auth` package.
+4. The `detect` package has been renamed to `credentials`. With this change some
+ function signatures were also updated for better readability.
+5. Derivative auth flows like `impersonate` and `downscope` have been moved to
+ be under the new `credentials` package.
+
+Although these changes are disruptive we think that they are for the best of the
+long-term health of the module. We do not expect any more large breaking changes
+like these in future revisions, even before 1.0.0. This version will be the
+first version of the auth library that our client libraries start to use and
+depend on.
+
+### Features
+
+* **auth/credentials/externalaccount:** Add default TokenURL ([#9700](https://github.com/googleapis/google-cloud-go/issues/9700)) ([81830e6](https://github.com/googleapis/google-cloud-go/commit/81830e6848ceefd055aa4d08f933d1154455a0f6))
+* **auth:** Add downscope.Options.UniverseDomain ([#9634](https://github.com/googleapis/google-cloud-go/issues/9634)) ([52cf7d7](https://github.com/googleapis/google-cloud-go/commit/52cf7d780853594291c4e34302d618299d1f5a1d))
+* **auth:** Add universe domain to grpctransport and httptransport ([#9663](https://github.com/googleapis/google-cloud-go/issues/9663)) ([67d353b](https://github.com/googleapis/google-cloud-go/commit/67d353beefe3b607c08c891876fbd95ab89e5fe3)), refs [#9670](https://github.com/googleapis/google-cloud-go/issues/9670)
+* **auth:** Add UniverseDomain to DetectOptions ([#9536](https://github.com/googleapis/google-cloud-go/issues/9536)) ([3618d3f](https://github.com/googleapis/google-cloud-go/commit/3618d3f7061615c0e189f376c75abc201203b501))
+* **auth:** Make package externalaccount public ([#9633](https://github.com/googleapis/google-cloud-go/issues/9633)) ([a0978d8](https://github.com/googleapis/google-cloud-go/commit/a0978d8e96968399940ebd7d092539772bf9caac))
+* **auth:** Move credentials to base auth package ([#9590](https://github.com/googleapis/google-cloud-go/issues/9590)) ([1a04baf](https://github.com/googleapis/google-cloud-go/commit/1a04bafa83c27342b9308d785645e1e5423ea10d))
+* **auth:** Refactor public sigs to use Credentials ([#9603](https://github.com/googleapis/google-cloud-go/issues/9603)) ([69cb240](https://github.com/googleapis/google-cloud-go/commit/69cb240c530b1f7173a9af2555c19e9a1beb56c5))
+
+
+### Bug Fixes
+
+* **auth/oauth2adapt:** Update protobuf dep to v1.33.0 ([30b038d](https://github.com/googleapis/google-cloud-go/commit/30b038d8cac0b8cd5dd4761c87f3f298760dd33a))
+* **auth:** Fix uint32 conversion ([9221c7f](https://github.com/googleapis/google-cloud-go/commit/9221c7fa12cef9d5fb7ddc92f41f1d6204971c7b))
+* **auth:** Port sts expires fix ([#9618](https://github.com/googleapis/google-cloud-go/issues/9618)) ([7bec97b](https://github.com/googleapis/google-cloud-go/commit/7bec97b2f51ed3ac4f9b88bf100d301da3f5d1bd))
+* **auth:** Read universe_domain from all credentials files ([#9632](https://github.com/googleapis/google-cloud-go/issues/9632)) ([16efbb5](https://github.com/googleapis/google-cloud-go/commit/16efbb52e39ea4a319e5ee1e95c0e0305b6d9824))
+* **auth:** Remove content-type header from idms get requests ([#9508](https://github.com/googleapis/google-cloud-go/issues/9508)) ([8589f41](https://github.com/googleapis/google-cloud-go/commit/8589f41599d265d7c3d46a3d86c9fab2329cbdd9))
+* **auth:** Update protobuf dep to v1.33.0 ([30b038d](https://github.com/googleapis/google-cloud-go/commit/30b038d8cac0b8cd5dd4761c87f3f298760dd33a))
+
+## [0.1.1](https://github.com/googleapis/google-cloud-go/compare/auth/v0.1.0...auth/v0.1.1) (2024-03-10)
+
+
+### Bug Fixes
+
+* **auth/impersonate:** Properly send default detect params ([#9529](https://github.com/googleapis/google-cloud-go/issues/9529)) ([5b6b8be](https://github.com/googleapis/google-cloud-go/commit/5b6b8bef577f82707e51f5cc5d258d5bdf90218f)), refs [#9136](https://github.com/googleapis/google-cloud-go/issues/9136)
+* **auth:** Update grpc-go to v1.56.3 ([343cea8](https://github.com/googleapis/google-cloud-go/commit/343cea8c43b1e31ae21ad50ad31d3b0b60143f8c))
+* **auth:** Update grpc-go to v1.59.0 ([81a97b0](https://github.com/googleapis/google-cloud-go/commit/81a97b06cb28b25432e4ece595c55a9857e960b7))
+
+## 0.1.0 (2023-10-18)
+
+
+### Features
+
+* **auth:** Add base auth package ([#8465](https://github.com/googleapis/google-cloud-go/issues/8465)) ([6a45f26](https://github.com/googleapis/google-cloud-go/commit/6a45f26b809b64edae21f312c18d4205f96b180e))
+* **auth:** Add cert support to httptransport ([#8569](https://github.com/googleapis/google-cloud-go/issues/8569)) ([37e3435](https://github.com/googleapis/google-cloud-go/commit/37e3435f8e98595eafab481bdfcb31a4c56fa993))
+* **auth:** Add Credentials.UniverseDomain() ([#8654](https://github.com/googleapis/google-cloud-go/issues/8654)) ([af0aa1e](https://github.com/googleapis/google-cloud-go/commit/af0aa1ed8015bc8fe0dd87a7549ae029107cbdb8))
+* **auth:** Add detect package ([#8491](https://github.com/googleapis/google-cloud-go/issues/8491)) ([d977419](https://github.com/googleapis/google-cloud-go/commit/d977419a3269f6acc193df77a2136a6eb4b4add7))
+* **auth:** Add downscope package ([#8532](https://github.com/googleapis/google-cloud-go/issues/8532)) ([dda9bff](https://github.com/googleapis/google-cloud-go/commit/dda9bff8ec70e6d104901b4105d13dcaa4e2404c))
+* **auth:** Add grpctransport package ([#8625](https://github.com/googleapis/google-cloud-go/issues/8625)) ([69a8347](https://github.com/googleapis/google-cloud-go/commit/69a83470bdcc7ed10c6c36d1abc3b7cfdb8a0ee5))
+* **auth:** Add httptransport package ([#8567](https://github.com/googleapis/google-cloud-go/issues/8567)) ([6898597](https://github.com/googleapis/google-cloud-go/commit/6898597d2ea95d630fcd00fd15c58c75ea843bff))
+* **auth:** Add idtoken package ([#8580](https://github.com/googleapis/google-cloud-go/issues/8580)) ([a79e693](https://github.com/googleapis/google-cloud-go/commit/a79e693e97e4e3e1c6742099af3dbc58866d88fe))
+* **auth:** Add impersonate package ([#8578](https://github.com/googleapis/google-cloud-go/issues/8578)) ([e29ba0c](https://github.com/googleapis/google-cloud-go/commit/e29ba0cb7bd3888ab9e808087027dc5a32474c04))
+* **auth:** Add support for external accounts in detect ([#8508](https://github.com/googleapis/google-cloud-go/issues/8508)) ([62210d5](https://github.com/googleapis/google-cloud-go/commit/62210d5d3e56e8e9f35db8e6ac0defec19582507))
+* **auth:** Port external account changes ([#8697](https://github.com/googleapis/google-cloud-go/issues/8697)) ([5823db5](https://github.com/googleapis/google-cloud-go/commit/5823db5d633069999b58b9131a7f9cd77e82c899))
+
+
+### Bug Fixes
+
+* **auth/oauth2adapt:** Update golang.org/x/net to v0.17.0 ([174da47](https://github.com/googleapis/google-cloud-go/commit/174da47254fefb12921bbfc65b7829a453af6f5d))
+* **auth:** Update golang.org/x/net to v0.17.0 ([174da47](https://github.com/googleapis/google-cloud-go/commit/174da47254fefb12921bbfc65b7829a453af6f5d))
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
@@ -0,0 +1,40 @@
+# Google Auth Library for Go
+
+[](https://pkg.go.dev/cloud.google.com/go/auth)
+
+## Install
+
+``` bash
+go get cloud.google.com/go/auth@latest
+```
+
+## Usage
+
+The most common way this library is used is transitively, by default, from any
+of our Go client libraries.
+
+### Notable use-cases
+
+- To create a credential directly please see examples in the
+ [credentials](https://pkg.go.dev/cloud.google.com/go/auth/credentials)
+ package.
+- To create an authenticated HTTP client please see examples in the
+ [httptransport](https://pkg.go.dev/cloud.google.com/go/auth/httptransport)
+ package.
+- To create an authenticated gRPC connection please see examples in the
+ [grpctransport](https://pkg.go.dev/cloud.google.com/go/auth/grpctransport)
+ package.
+- To create an ID token please see examples in the
+ [idtoken](https://pkg.go.dev/cloud.google.com/go/auth/credentials/idtoken)
+ package.
+
+## Contributing
+
+Contributions are welcome. Please, see the
+[CONTRIBUTING](https://github.com/GoogleCloudPlatform/google-cloud-go/blob/main/CONTRIBUTING.md)
+document for details.
+
+Please note that this project is released with a Contributor Code of Conduct.
+By participating in this project you agree to abide by its terms.
+See [Contributor Code of Conduct](https://github.com/GoogleCloudPlatform/google-cloud-go/blob/main/CONTRIBUTING.md#contributor-code-of-conduct)
+for more information.
@@ -0,0 +1,618 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package auth provides utilities for managing Google Cloud credentials,
+// including functionality for creating, caching, and refreshing OAuth2 tokens.
+// It offers customizable options for different OAuth2 flows, such as 2-legged
+// (2LO) and 3-legged (3LO) OAuth, along with support for PKCE and automatic
+// token management.
+package auth
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "net/url"
+ "strings"
+ "sync"
+ "time"
+
+ "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/jwt"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
+const (
+ // Parameter keys for AuthCodeURL method to support PKCE.
+ codeChallengeKey = "code_challenge"
+ codeChallengeMethodKey = "code_challenge_method"
+
+ // Parameter key for Exchange method to support PKCE.
+ codeVerifierKey = "code_verifier"
+
+ // 3 minutes and 45 seconds before expiration. The shortest MDS cache is 4 minutes,
+ // so we give it 15 seconds to refresh its cache before attempting to refresh a token.
+ defaultExpiryDelta = 225 * time.Second
+
+ // universeDomainDefault is the default Cloud universe service domain.
+ universeDomainDefault = "googleapis.com"
+)
+
+// tokenState represents different states for a [Token].
+type tokenState int
+
+const (
+ // fresh indicates that the [Token] is valid. It is not expired or close to
+ // expired, or the token has no expiry.
+ fresh tokenState = iota
+ // stale indicates that the [Token] is close to expired, and should be
+ // refreshed. The token can be used normally.
+ stale
+ // invalid indicates that the [Token] is expired or invalid. The token
+ // cannot be used for a normal operation.
+ invalid
+)
+
+var (
+ // defaultGrantType is the JWT-bearer grant type sent in 2LO token requests.
+ defaultGrantType = "urn:ietf:params:oauth:grant-type:jwt-bearer"
+ defaultHeader = &jwt.Header{Algorithm: jwt.HeaderAlgRSA256, Type: jwt.HeaderType}
+
+ // for testing
+ timeNow = time.Now
+)
+
+// TokenProvider specifies an interface for anything that can return a token.
+type TokenProvider interface {
+ // Token returns a Token or an error.
+ // The Token returned must be safe to use
+ // concurrently.
+ // The returned Token must not be modified.
+ // The context provided must be sent along to any requests that are made in
+ // the implementing code.
+ Token(context.Context) (*Token, error)
+}
+
+// Token holds the credential token used to authorize requests. All fields are
+// considered read-only.
+type Token struct {
+ // Value is the token used to authorize requests. It is usually an access
+ // token but may be other types of tokens such as ID tokens in some flows.
+ Value string
+ // Type is the type of token Value is. If uninitialized, it should be
+ // assumed to be a "Bearer" token.
+ Type string
+ // Expiry is the time the token is set to expire.
+ Expiry time.Time
+ // Metadata may include, but is not limited to, the body of the token
+ // response returned by the server.
+ Metadata map[string]interface{} // TODO(codyoss): maybe make a method to flatten metadata to avoid []string for url.Values
+}
+
+// IsValid reports that a [Token] is non-nil, has a [Token.Value], and has not
+// expired. A token is considered expired if [Token.Expiry] has passed or will
+// pass in the next 225 seconds.
+func (t *Token) IsValid() bool {
+ return t.isValidWithEarlyExpiry(defaultExpiryDelta)
+}
+
+// MetadataString is a convenience method for accessing string values in the
+// token's metadata. Returns an empty string if the metadata is nil or the value
+// for the given key cannot be cast to a string.
+func (t *Token) MetadataString(k string) string {
+ if t.Metadata == nil {
+ return ""
+ }
+ s, ok := t.Metadata[k].(string)
+ if !ok {
+ return ""
+ }
+ return s
+}
+
+// isValidWithEarlyExpiry reports whether the token is non-empty and will not
+// expire within earlyExpiry from now. A zero Expiry means the token never
+// expires.
+func (t *Token) isValidWithEarlyExpiry(earlyExpiry time.Duration) bool {
+ if t.isEmpty() {
+ return false
+ }
+ if t.Expiry.IsZero() {
+ return true
+ }
+ return !t.Expiry.Round(0).Add(-earlyExpiry).Before(timeNow())
+}
+
+// isEmpty reports whether the token is nil or has no value.
+func (t *Token) isEmpty() bool {
+ return t == nil || t.Value == ""
+}
+
+// Credentials holds Google credentials, including
+// [Application Default Credentials].
+//
+// [Application Default Credentials]: https://developers.google.com/accounts/docs/application-default-credentials
+type Credentials struct {
+ json []byte
+ projectID CredentialsPropertyProvider
+ quotaProjectID CredentialsPropertyProvider
+ // universeDomain is the default service domain for a given Cloud universe.
+ universeDomain CredentialsPropertyProvider
+
+ TokenProvider
+}
+
+// JSON returns the bytes associated with the file used to source
+// credentials if one was used.
+func (c *Credentials) JSON() []byte {
+ return c.json
+}
+
+// ProjectID returns the associated project ID from the underlying file or
+// environment.
+func (c *Credentials) ProjectID(ctx context.Context) (string, error) {
+ if c.projectID == nil {
+ return internal.GetProjectID(c.json, ""), nil
+ }
+ v, err := c.projectID.GetProperty(ctx)
+ if err != nil {
+ return "", err
+ }
+ return internal.GetProjectID(c.json, v), nil
+}
+
+// QuotaProjectID returns the associated quota project ID from the underlying
+// file or environment.
+func (c *Credentials) QuotaProjectID(ctx context.Context) (string, error) {
+ if c.quotaProjectID == nil {
+ return internal.GetQuotaProject(c.json, ""), nil
+ }
+ v, err := c.quotaProjectID.GetProperty(ctx)
+ if err != nil {
+ return "", err
+ }
+ return internal.GetQuotaProject(c.json, v), nil
+}
+
+// UniverseDomain returns the default service domain for a given Cloud universe.
+// The default value is "googleapis.com".
+func (c *Credentials) UniverseDomain(ctx context.Context) (string, error) {
+ if c.universeDomain == nil {
+ return universeDomainDefault, nil
+ }
+ v, err := c.universeDomain.GetProperty(ctx)
+ if err != nil {
+ return "", err
+ }
+ if v == "" {
+ return universeDomainDefault, nil
+ }
+ return v, err
+}
+
+// CredentialsPropertyProvider provides an implementation to fetch a property
+// value for [Credentials].
+type CredentialsPropertyProvider interface {
+ GetProperty(context.Context) (string, error)
+}
+
+// CredentialsPropertyFunc is a type adapter to allow the use of ordinary
+// functions as a [CredentialsPropertyProvider].
+type CredentialsPropertyFunc func(context.Context) (string, error)
+
+// GetProperty returns the property value for the given context.
+func (p CredentialsPropertyFunc) GetProperty(ctx context.Context) (string, error) {
+ return p(ctx)
+}
+
+// CredentialsOptions are used to configure [Credentials].
+type CredentialsOptions struct {
+ // TokenProvider is a means of sourcing a token for the credentials. Required.
+ TokenProvider TokenProvider
+ // JSON is the raw contents of the credentials file if sourced from a file.
+ JSON []byte
+ // ProjectIDProvider resolves the project ID associated with the
+ // credentials.
+ ProjectIDProvider CredentialsPropertyProvider
+ // QuotaProjectIDProvider resolves the quota project ID associated with the
+ // credentials.
+ QuotaProjectIDProvider CredentialsPropertyProvider
+ // UniverseDomainProvider resolves the universe domain with the credentials.
+ UniverseDomainProvider CredentialsPropertyProvider
+}
+
+// NewCredentials returns new [Credentials] from the provided options.
+func NewCredentials(opts *CredentialsOptions) *Credentials {
+ creds := &Credentials{
+ TokenProvider: opts.TokenProvider,
+ json: opts.JSON,
+ projectID: opts.ProjectIDProvider,
+ quotaProjectID: opts.QuotaProjectIDProvider,
+ universeDomain: opts.UniverseDomainProvider,
+ }
+
+ return creds
+}
+
+// CachedTokenProviderOptions provides options for configuring a cached
+// [TokenProvider].
+type CachedTokenProviderOptions struct {
+ // DisableAutoRefresh makes the TokenProvider always return the same token,
+ // even if it is expired. The default is false. Optional.
+ DisableAutoRefresh bool
+ // ExpireEarly configures the amount of time before a token expires, that it
+ // should be refreshed. If unset, the default value is 3 minutes and 45
+ // seconds. Optional.
+ ExpireEarly time.Duration
+ // DisableAsyncRefresh configures a synchronous workflow that refreshes
+ // tokens in a blocking manner. The default is false. Optional.
+ DisableAsyncRefresh bool
+}
+
+// autoRefresh reports whether expired tokens should be refreshed. It is
+// nil-safe; a nil receiver yields the default (true).
+func (ctpo *CachedTokenProviderOptions) autoRefresh() bool {
+ if ctpo == nil {
+ return true
+ }
+ return !ctpo.DisableAutoRefresh
+}
+
+// expireEarly returns the configured early-refresh window, falling back to
+// defaultExpiryDelta when unset. It is nil-safe.
+func (ctpo *CachedTokenProviderOptions) expireEarly() time.Duration {
+ if ctpo == nil || ctpo.ExpireEarly == 0 {
+ return defaultExpiryDelta
+ }
+ return ctpo.ExpireEarly
+}
+
+// blockingRefresh reports whether refreshes should happen synchronously. It is
+// nil-safe; a nil receiver yields the default (asynchronous refresh).
+func (ctpo *CachedTokenProviderOptions) blockingRefresh() bool {
+ if ctpo == nil {
+ return false
+ }
+ return ctpo.DisableAsyncRefresh
+}
+
+// NewCachedTokenProvider wraps a [TokenProvider] to cache the tokens returned
+// by the underlying provider. By default it will refresh tokens asynchronously
+// a few minutes before they expire.
+func NewCachedTokenProvider(tp TokenProvider, opts *CachedTokenProviderOptions) TokenProvider {
+ // Avoid double-wrapping: an already-cached provider is returned as-is.
+ if ctp, ok := tp.(*cachedTokenProvider); ok {
+ return ctp
+ }
+ return &cachedTokenProvider{
+ tp: tp,
+ autoRefresh: opts.autoRefresh(),
+ expireEarly: opts.expireEarly(),
+ blockingRefresh: opts.blockingRefresh(),
+ }
+}
+
+// cachedTokenProvider caches tokens from a wrapped provider and refreshes them
+// either synchronously or asynchronously depending on configuration.
+type cachedTokenProvider struct {
+ tp TokenProvider
+ autoRefresh bool
+ expireEarly time.Duration
+ blockingRefresh bool
+
+ // mu guards cachedToken, isRefreshRunning, and isRefreshErr.
+ mu sync.Mutex
+ cachedToken *Token
+ // isRefreshRunning ensures that the non-blocking refresh will only be
+ // attempted once, even if multiple callers enter the Token method.
+ isRefreshRunning bool
+ // isRefreshErr ensures that the non-blocking refresh will only be attempted
+ // once per refresh window if an error is encountered.
+ isRefreshErr bool
+}
+
+// Token returns the cached token, refreshing it per the configured blocking or
+// non-blocking strategy.
+func (c *cachedTokenProvider) Token(ctx context.Context) (*Token, error) {
+ if c.blockingRefresh {
+ return c.tokenBlocking(ctx)
+ }
+ return c.tokenNonBlocking(ctx)
+}
+
+// tokenNonBlocking returns the cached token immediately when fresh or stale
+// (kicking off an async refresh for stale tokens); it only blocks when the
+// token is invalid.
+func (c *cachedTokenProvider) tokenNonBlocking(ctx context.Context) (*Token, error) {
+ switch c.tokenState() {
+ case fresh:
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ return c.cachedToken, nil
+ case stale:
+ // Call tokenAsync with a new Context because the user-provided context
+ // may have a short timeout incompatible with async token refresh.
+ c.tokenAsync(context.Background())
+ // Return the stale token immediately to not block customer requests to Cloud services.
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ return c.cachedToken, nil
+ default: // invalid
+ return c.tokenBlocking(ctx)
+ }
+}
+
+// tokenState reports the token's validity.
+func (c *cachedTokenProvider) tokenState() tokenState {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ t := c.cachedToken
+ now := timeNow()
+ if t == nil || t.Value == "" {
+ return invalid
+ } else if t.Expiry.IsZero() {
+ return fresh
+ } else if now.After(t.Expiry.Round(0)) {
+ return invalid
+ } else if now.After(t.Expiry.Round(0).Add(-c.expireEarly)) {
+ return stale
+ }
+ return fresh
+}
+
+// tokenAsync uses a bool to ensure that only one non-blocking token refresh
+// happens at a time, even if multiple callers have entered this function
+// concurrently. This avoids creating an arbitrary number of concurrent
+// goroutines. Retries should be attempted and managed within the Token method.
+// If the refresh attempt fails, no further attempts are made until the refresh
+// window expires and the token enters the invalid state, at which point the
+// blocking call to Token should likely return the same error on the main goroutine.
+func (c *cachedTokenProvider) tokenAsync(ctx context.Context) {
+ fn := func() {
+ c.mu.Lock()
+ c.isRefreshRunning = true
+ c.mu.Unlock()
+ t, err := c.tp.Token(ctx)
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ c.isRefreshRunning = false
+ if err != nil {
+ // Discard errors from the non-blocking refresh, but prevent further
+ // attempts.
+ c.isRefreshErr = true
+ return
+ }
+ c.cachedToken = t
+ }
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ if !c.isRefreshRunning && !c.isRefreshErr {
+ go fn()
+ }
+}
+
+// tokenBlocking returns the cached token when it is still usable; otherwise it
+// synchronously fetches a new token from the underlying provider and caches it.
+func (c *cachedTokenProvider) tokenBlocking(ctx context.Context) (*Token, error) {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ c.isRefreshErr = false
+ if c.cachedToken.IsValid() || (!c.autoRefresh && !c.cachedToken.isEmpty()) {
+ return c.cachedToken, nil
+ }
+ t, err := c.tp.Token(ctx)
+ if err != nil {
+ return nil, err
+ }
+ c.cachedToken = t
+ return t, nil
+}
+
+// Error is an error associated with retrieving a [Token]. It can hold useful
+// additional details for debugging.
+type Error struct {
+ // Response is the HTTP response associated with error. The body will always
+ // be already closed and consumed.
+ Response *http.Response
+ // Body is the HTTP response body.
+ Body []byte
+ // Err is the underlying wrapped error.
+ Err error
+
+ // code returned in the token response
+ code string
+ // description returned in the token response
+ description string
+ // uri returned in the token response
+ uri string
+}
+
+// Error formats the token response's error code, description, and URI when a
+// code is present; otherwise it falls back to the HTTP status code and body.
+func (e *Error) Error() string {
+ if e.code != "" {
+ s := fmt.Sprintf("auth: %q", e.code)
+ if e.description != "" {
+ s += fmt.Sprintf(" %q", e.description)
+ }
+ if e.uri != "" {
+ s += fmt.Sprintf(" %q", e.uri)
+ }
+ return s
+ }
+ return fmt.Sprintf("auth: cannot fetch token: %v\nResponse: %s", e.Response.StatusCode, e.Body)
+}
+
+// Temporary returns true if the error is considered temporary and may be able
+// to be retried.
+func (e *Error) Temporary() bool {
+ if e.Response == nil {
+ return false
+ }
+ sc := e.Response.StatusCode
+ return sc == http.StatusInternalServerError || sc == http.StatusServiceUnavailable || sc == http.StatusRequestTimeout || sc == http.StatusTooManyRequests
+}
+
+// Unwrap returns the wrapped error, supporting errors.Is and errors.As.
+func (e *Error) Unwrap() error {
+ return e.Err
+}
+
+// Style describes how the token endpoint wants to receive the ClientID and
+// ClientSecret.
+type Style int
+
+const (
+ // StyleUnknown means the value has not been initiated. Sending this in
+ // a request will cause the token exchange to fail.
+ StyleUnknown Style = iota
+ // StyleInParams sends client info in the body of a POST request.
+ StyleInParams
+ // StyleInHeader sends client info using Basic Authorization header.
+ StyleInHeader
+)
+
+// Options2LO is the configuration settings for doing a 2-legged JWT OAuth2 flow.
+type Options2LO struct {
+ // Email is the OAuth2 client ID. This value is set as the "iss" in the
+ // JWT.
+ Email string
+ // PrivateKey contains the contents of an RSA private key or the
+ // contents of a PEM file that contains a private key. It is used to sign
+ // the JWT created.
+ PrivateKey []byte
+ // TokenURL is the URL the JWT is sent to. Required.
+ TokenURL string
+ // PrivateKeyID is the ID of the key used to sign the JWT. It is used as the
+ // "kid" in the JWT header. Optional.
+ PrivateKeyID string
+ // Subject is the user to impersonate. It is used as the "sub" in
+ // the JWT. Optional.
+ Subject string
+ // Scopes specifies requested permissions for the token. Optional.
+ Scopes []string
+ // Expires specifies the lifetime of the token. Optional.
+ Expires time.Duration
+ // Audience specifies the "aud" in the JWT. Optional.
+ Audience string
+ // PrivateClaims allows specifying any custom claims for the JWT. Optional.
+ PrivateClaims map[string]interface{}
+
+ // Client is the client to be used to make the underlying token requests.
+ // Optional.
+ Client *http.Client
+ // UseIDToken requests that the token returned be an ID token if one is
+ // returned from the server. Optional.
+ UseIDToken bool
+ // Logger is used for debug logging. If provided, logging will be enabled
+ // at the logger's configured level. By default logging is disabled unless
+ // enabled by setting GOOGLE_SDK_GO_LOGGING_LEVEL in which case a default
+ // logger will be used. Optional.
+ Logger *slog.Logger
+}
+
+// client returns the configured HTTP client, or the package default when none
+// was provided.
+func (o *Options2LO) client() *http.Client {
+ if o.Client != nil {
+ return o.Client
+ }
+ return internal.DefaultClient()
+}
+
+// validate checks that all required fields are set. It is nil-safe.
+func (o *Options2LO) validate() error {
+ if o == nil {
+ return errors.New("auth: options must be provided")
+ }
+ if o.Email == "" {
+ return errors.New("auth: email must be provided")
+ }
+ if len(o.PrivateKey) == 0 {
+ return errors.New("auth: private key must be provided")
+ }
+ if o.TokenURL == "" {
+ return errors.New("auth: token URL must be provided")
+ }
+ return nil
+}
+
+// New2LOTokenProvider returns a [TokenProvider] from the provided options.
+func New2LOTokenProvider(opts *Options2LO) (TokenProvider, error) {
+ if err := opts.validate(); err != nil {
+ return nil, err
+ }
+ return tokenProvider2LO{opts: opts, Client: opts.client(), logger: internallog.New(opts.Logger)}, nil
+}
+
+// tokenProvider2LO implements [TokenProvider] for the 2-legged JWT flow.
+type tokenProvider2LO struct {
+ opts *Options2LO
+ Client *http.Client
+ logger *slog.Logger
+}
+
+// Token builds and signs a JWT assertion from the configured options, posts it
+// to the token URL as a JWT-bearer grant, and returns the resulting token.
+// When UseIDToken is set, the returned token's Value is the ID token from the
+// response.
+func (tp tokenProvider2LO) Token(ctx context.Context) (*Token, error) {
+ pk, err := internal.ParseKey(tp.opts.PrivateKey)
+ if err != nil {
+ return nil, err
+ }
+ claimSet := &jwt.Claims{
+ Iss: tp.opts.Email,
+ Scope: strings.Join(tp.opts.Scopes, " "),
+ Aud: tp.opts.TokenURL,
+ AdditionalClaims: tp.opts.PrivateClaims,
+ Sub: tp.opts.Subject,
+ }
+ if t := tp.opts.Expires; t > 0 {
+ claimSet.Exp = time.Now().Add(t).Unix()
+ }
+ if aud := tp.opts.Audience; aud != "" {
+ claimSet.Aud = aud
+ }
+ // Copy the shared default header so setting KeyID never mutates the package-level value.
+ h := *defaultHeader
+ h.KeyID = tp.opts.PrivateKeyID
+ payload, err := jwt.EncodeJWS(&h, claimSet, pk)
+ if err != nil {
+ return nil, err
+ }
+ v := url.Values{}
+ v.Set("grant_type", defaultGrantType)
+ v.Set("assertion", payload)
+ req, err := http.NewRequestWithContext(ctx, "POST", tp.opts.TokenURL, strings.NewReader(v.Encode()))
+ if err != nil {
+ return nil, err
+ }
+ req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
+ tp.logger.DebugContext(ctx, "2LO token request", "request", internallog.HTTPRequest(req, []byte(v.Encode())))
+ resp, body, err := internal.DoRequest(tp.Client, req)
+ if err != nil {
+ return nil, fmt.Errorf("auth: cannot fetch token: %w", err)
+ }
+ tp.logger.DebugContext(ctx, "2LO token response", "response", internallog.HTTPResponse(resp, body))
+ if c := resp.StatusCode; c < http.StatusOK || c >= http.StatusMultipleChoices {
+ return nil, &Error{
+ Response: resp,
+ Body: body,
+ }
+ }
+ // tokenRes is the JSON response body.
+ var tokenRes struct {
+ AccessToken string `json:"access_token"`
+ TokenType string `json:"token_type"`
+ IDToken string `json:"id_token"`
+ ExpiresIn int64 `json:"expires_in"`
+ }
+ if err := json.Unmarshal(body, &tokenRes); err != nil {
+ return nil, fmt.Errorf("auth: cannot fetch token: %w", err)
+ }
+ token := &Token{
+ Value: tokenRes.AccessToken,
+ Type: tokenRes.TokenType,
+ }
+ token.Metadata = make(map[string]interface{})
+ json.Unmarshal(body, &token.Metadata) // no error checks for optional fields
+
+ if secs := tokenRes.ExpiresIn; secs > 0 {
+ token.Expiry = time.Now().Add(time.Duration(secs) * time.Second)
+ }
+ if v := tokenRes.IDToken; v != "" {
+ // decode returned id token to get expiry
+ claimSet, err := jwt.DecodeJWS(v)
+ if err != nil {
+ return nil, fmt.Errorf("auth: error decoding JWT token: %w", err)
+ }
+ token.Expiry = time.Unix(claimSet.Exp, 0)
+ }
+ if tp.opts.UseIDToken {
+ if tokenRes.IDToken == "" {
+ return nil, fmt.Errorf("auth: response doesn't have JWT token")
+ }
+ token.Value = tokenRes.IDToken
+ }
+ return token, nil
+}
@@ -0,0 +1,90 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package credentials
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/url"
+ "strings"
+ "time"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/compute/metadata"
+)
+
+var (
+ computeTokenMetadata = map[string]interface{}{
+ "auth.google.tokenSource": "compute-metadata",
+ "auth.google.serviceAccount": "default",
+ }
+ computeTokenURI = "instance/service-accounts/default/token"
+)
+
+// computeTokenProvider creates a [cloud.google.com/go/auth.TokenProvider] that
+// uses the metadata service to retrieve tokens.
+func computeTokenProvider(opts *DetectOptions, client *metadata.Client) auth.TokenProvider {
+ return auth.NewCachedTokenProvider(&computeProvider{
+ scopes: opts.Scopes,
+ client: client,
+ }, &auth.CachedTokenProviderOptions{
+ ExpireEarly: opts.EarlyTokenRefresh,
+ DisableAsyncRefresh: opts.DisableAsyncRefresh,
+ })
+}
+
// computeProvider fetches tokens from the google cloud metadata service.
type computeProvider struct {
	scopes []string         // scopes requested for the token; appended to the token URI when non-empty
	client *metadata.Client // client used to call the metadata token endpoint
}
+
// metadataTokenResp mirrors the JSON body returned by the metadata server's
// token endpoint.
type metadataTokenResp struct {
	AccessToken  string `json:"access_token"`
	ExpiresInSec int    `json:"expires_in"` // token lifetime in seconds from now
	TokenType    string `json:"token_type"`
}
+
+func (cs *computeProvider) Token(ctx context.Context) (*auth.Token, error) {
+ tokenURI, err := url.Parse(computeTokenURI)
+ if err != nil {
+ return nil, err
+ }
+ if len(cs.scopes) > 0 {
+ v := url.Values{}
+ v.Set("scopes", strings.Join(cs.scopes, ","))
+ tokenURI.RawQuery = v.Encode()
+ }
+ tokenJSON, err := cs.client.GetWithContext(ctx, tokenURI.String())
+ if err != nil {
+ return nil, fmt.Errorf("credentials: cannot fetch token: %w", err)
+ }
+ var res metadataTokenResp
+ if err := json.NewDecoder(strings.NewReader(tokenJSON)).Decode(&res); err != nil {
+ return nil, fmt.Errorf("credentials: invalid token JSON from metadata: %w", err)
+ }
+ if res.ExpiresInSec == 0 || res.AccessToken == "" {
+ return nil, errors.New("credentials: incomplete token received from metadata")
+ }
+ return &auth.Token{
+ Value: res.AccessToken,
+ Type: res.TokenType,
+ Expiry: time.Now().Add(time.Duration(res.ExpiresInSec) * time.Second),
+ Metadata: computeTokenMetadata,
+ }, nil
+
+}
@@ -0,0 +1,279 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package credentials
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "os"
+ "time"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/credsfile"
+ "cloud.google.com/go/compute/metadata"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
+const (
+ // jwtTokenURL is Google's OAuth 2.0 token URL to use with the JWT(2LO) flow.
+ jwtTokenURL = "https://oauth2.googleapis.com/token"
+
+ // Google's OAuth 2.0 default endpoints.
+ googleAuthURL = "https://accounts.google.com/o/oauth2/auth"
+ googleTokenURL = "https://oauth2.googleapis.com/token"
+
+ // GoogleMTLSTokenURL is Google's default OAuth2.0 mTLS endpoint.
+ GoogleMTLSTokenURL = "https://oauth2.mtls.googleapis.com/token"
+
+ // Help on default credentials
+ adcSetupURL = "https://cloud.google.com/docs/authentication/external/set-up-adc"
+)
+
+var (
+ // for testing
+ allowOnGCECheck = true
+)
+
+// OnGCE reports whether this process is running in Google Cloud.
+func OnGCE() bool {
+ // TODO(codyoss): once all libs use this auth lib move metadata check here
+ return allowOnGCECheck && metadata.OnGCE()
+}
+
+// DetectDefault searches for "Application Default Credentials" and returns
+// a credential based on the [DetectOptions] provided.
+//
+// It looks for credentials in the following places, preferring the first
+// location found:
+//
+// - A JSON file whose path is specified by the GOOGLE_APPLICATION_CREDENTIALS
+// environment variable. For workload identity federation, refer to
+// https://cloud.google.com/iam/docs/how-to#using-workload-identity-federation
+// on how to generate the JSON configuration file for on-prem/non-Google
+// cloud platforms.
+// - A JSON file in a location known to the gcloud command-line tool. On
+// Windows, this is %APPDATA%/gcloud/application_default_credentials.json. On
+// other systems, $HOME/.config/gcloud/application_default_credentials.json.
+// - On Google Compute Engine, Google App Engine standard second generation
+// runtimes, and Google App Engine flexible environment, it fetches
+// credentials from the metadata server.
+func DetectDefault(opts *DetectOptions) (*auth.Credentials, error) {
+ if err := opts.validate(); err != nil {
+ return nil, err
+ }
+ if len(opts.CredentialsJSON) > 0 {
+ return readCredentialsFileJSON(opts.CredentialsJSON, opts)
+ }
+ if opts.CredentialsFile != "" {
+ return readCredentialsFile(opts.CredentialsFile, opts)
+ }
+ if filename := os.Getenv(credsfile.GoogleAppCredsEnvVar); filename != "" {
+ creds, err := readCredentialsFile(filename, opts)
+ if err != nil {
+ return nil, err
+ }
+ return creds, nil
+ }
+
+ fileName := credsfile.GetWellKnownFileName()
+ if b, err := os.ReadFile(fileName); err == nil {
+ return readCredentialsFileJSON(b, opts)
+ }
+
+ if OnGCE() {
+ metadataClient := metadata.NewWithOptions(&metadata.Options{
+ Logger: opts.logger(),
+ })
+ return auth.NewCredentials(&auth.CredentialsOptions{
+ TokenProvider: computeTokenProvider(opts, metadataClient),
+ ProjectIDProvider: auth.CredentialsPropertyFunc(func(ctx context.Context) (string, error) {
+ return metadataClient.ProjectIDWithContext(ctx)
+ }),
+ UniverseDomainProvider: &internal.ComputeUniverseDomainProvider{
+ MetadataClient: metadataClient,
+ },
+ }), nil
+ }
+
+ return nil, fmt.Errorf("credentials: could not find default credentials. See %v for more information", adcSetupURL)
+}
+
// DetectOptions provides configuration for [DetectDefault].
type DetectOptions struct {
	// Scopes that credentials tokens should have. Example:
	// https://www.googleapis.com/auth/cloud-platform. Required if Audience is
	// not provided.
	Scopes []string
	// Audience that credentials tokens should have. Only applicable for 2LO
	// flows with service accounts. If specified, scopes should not be provided.
	Audience string
	// Subject is the user email used for [domain wide delegation](https://developers.google.com/identity/protocols/oauth2/service-account#delegatingauthority).
	// Optional.
	Subject string
	// EarlyTokenRefresh configures how early before a token expires that it
	// should be refreshed. Once the token's time until expiration has entered
	// this refresh window the token is considered valid but stale. If unset,
	// the default value is 3 minutes and 45 seconds. Optional.
	EarlyTokenRefresh time.Duration
	// DisableAsyncRefresh configures a synchronous workflow that refreshes
	// stale tokens while blocking. The default is false. Optional.
	DisableAsyncRefresh bool
	// AuthHandlerOptions configures an authorization handler and other options
	// for 3LO flows. It is required, and only used, for client credential
	// flows.
	AuthHandlerOptions *auth.AuthorizationHandlerOptions
	// TokenURL allows to set the token endpoint for user credential flows. If
	// unset the default value is: https://oauth2.googleapis.com/token.
	// Optional.
	TokenURL string
	// STSAudience is the audience sent to when retrieving an STS token.
	// Currently this is only used for the GDCH auth flow, for which it is
	// required.
	STSAudience string
	// CredentialsFile overrides detection logic and sources a credential file
	// from the provided filepath. If provided, CredentialsJSON must not be.
	// Optional.
	CredentialsFile string
	// CredentialsJSON overrides detection logic and uses the JSON bytes as the
	// source for the credential. If provided, CredentialsFile must not be.
	// Optional.
	CredentialsJSON []byte
	// UseSelfSignedJWT directs service account based credentials to create a
	// self-signed JWT with the private key found in the file, skipping any
	// network requests that would normally be made. Optional.
	UseSelfSignedJWT bool
	// Client configures the underlying client used to make network requests
	// when fetching tokens. Optional.
	Client *http.Client
	// UniverseDomain is the default service domain for a given Cloud universe.
	// The default value is "googleapis.com". This option is ignored for
	// authentication flows that do not support universe domain. Optional.
	UniverseDomain string
	// Logger is used for debug logging. If provided, logging will be enabled
	// at the loggers configured level. By default logging is disabled unless
	// enabled by setting GOOGLE_SDK_GO_LOGGING_LEVEL in which case a default
	// logger will be used. Optional.
	Logger *slog.Logger
}
+
+func (o *DetectOptions) validate() error {
+ if o == nil {
+ return errors.New("credentials: options must be provided")
+ }
+ if len(o.Scopes) > 0 && o.Audience != "" {
+ return errors.New("credentials: both scopes and audience were provided")
+ }
+ if len(o.CredentialsJSON) > 0 && o.CredentialsFile != "" {
+ return errors.New("credentials: both credentials file and JSON were provided")
+ }
+ return nil
+}
+
+func (o *DetectOptions) tokenURL() string {
+ if o.TokenURL != "" {
+ return o.TokenURL
+ }
+ return googleTokenURL
+}
+
+func (o *DetectOptions) scopes() []string {
+ scopes := make([]string, len(o.Scopes))
+ copy(scopes, o.Scopes)
+ return scopes
+}
+
+func (o *DetectOptions) client() *http.Client {
+ if o.Client != nil {
+ return o.Client
+ }
+ return internal.DefaultClient()
+}
+
// logger wraps the (possibly nil) configured logger via internallog, which —
// per the Logger field's documentation — disables logging by default unless
// GOOGLE_SDK_GO_LOGGING_LEVEL enables a default logger.
func (o *DetectOptions) logger() *slog.Logger {
	return internallog.New(o.Logger)
}
+
+func readCredentialsFile(filename string, opts *DetectOptions) (*auth.Credentials, error) {
+ b, err := os.ReadFile(filename)
+ if err != nil {
+ return nil, err
+ }
+ return readCredentialsFileJSON(b, opts)
+}
+
+func readCredentialsFileJSON(b []byte, opts *DetectOptions) (*auth.Credentials, error) {
+ // attempt to parse jsonData as a Google Developers Console client_credentials.json.
+ config := clientCredConfigFromJSON(b, opts)
+ if config != nil {
+ if config.AuthHandlerOpts == nil {
+ return nil, errors.New("credentials: auth handler must be specified for this credential filetype")
+ }
+ tp, err := auth.New3LOTokenProvider(config)
+ if err != nil {
+ return nil, err
+ }
+ return auth.NewCredentials(&auth.CredentialsOptions{
+ TokenProvider: tp,
+ JSON: b,
+ }), nil
+ }
+ return fileCredentials(b, opts)
+}
+
// clientCredConfigFromJSON attempts to interpret b as a Google Developers
// Console client_credentials.json ("web" or "installed" OAuth client) and
// returns the corresponding 3LO options. It returns nil when b is not such a
// file, signaling the caller to try other file types.
func clientCredConfigFromJSON(b []byte, opts *DetectOptions) *auth.Options3LO {
	var creds credsfile.ClientCredentialsFile
	var c *credsfile.Config3LO
	if err := json.Unmarshal(b, &creds); err != nil {
		return nil
	}
	// The client config lives under either the "web" or "installed" key.
	switch {
	case creds.Web != nil:
		c = creds.Web
	case creds.Installed != nil:
		c = creds.Installed
	default:
		return nil
	}
	// At least one redirect URI is required; the first one is used below.
	if len(c.RedirectURIs) < 1 {
		return nil
	}
	// Copy the caller's handler options so the returned config does not alias
	// opts.AuthHandlerOptions.
	var handleOpts *auth.AuthorizationHandlerOptions
	if opts.AuthHandlerOptions != nil {
		handleOpts = &auth.AuthorizationHandlerOptions{
			Handler:  opts.AuthHandlerOptions.Handler,
			State:    opts.AuthHandlerOptions.State,
			PKCEOpts: opts.AuthHandlerOptions.PKCEOpts,
		}
	}
	return &auth.Options3LO{
		ClientID:         c.ClientID,
		ClientSecret:     c.ClientSecret,
		RedirectURL:      c.RedirectURIs[0],
		Scopes:           opts.scopes(),
		AuthURL:          c.AuthURI,
		TokenURL:         c.TokenURI,
		Client:           opts.client(),
		Logger:           opts.logger(),
		EarlyTokenExpiry: opts.EarlyTokenRefresh,
		AuthHandlerOpts:  handleOpts,
		// TODO(codyoss): refactor this out. We need to add in auto-detection
		// for this use case.
		AuthStyle: auth.StyleInParams,
	}
}
@@ -0,0 +1,45 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package credentials provides support for making OAuth2 authorized and
+// authenticated HTTP requests to Google APIs. It supports the Web server flow,
+// client-side credentials, service accounts, Google Compute Engine service
+// accounts, Google App Engine service accounts and workload identity federation
+// from non-Google cloud platforms.
+//
+// A brief overview of the package follows. For more information, please read
+// https://developers.google.com/accounts/docs/OAuth2
+// and
+// https://developers.google.com/accounts/docs/application-default-credentials.
+// For more information on using workload identity federation, refer to
+// https://cloud.google.com/iam/docs/how-to#using-workload-identity-federation.
+//
+// # Credentials
+//
+// The [cloud.google.com/go/auth.Credentials] type represents Google
+// credentials, including Application Default Credentials.
+//
+// Use [DetectDefault] to obtain Application Default Credentials.
+//
+// Application Default Credentials support workload identity federation to
+// access Google Cloud resources from non-Google Cloud platforms including Amazon
+// Web Services (AWS), Microsoft Azure or any identity provider that supports
+// OpenID Connect (OIDC). Workload identity federation is recommended for
+// non-Google Cloud environments as it avoids the need to download, manage, and
+// store service account private keys locally.
+//
+// # Workforce Identity Federation
+//
+// For more information on this feature see [cloud.google.com/go/auth/credentials/externalaccount].
+package credentials
@@ -0,0 +1,231 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package credentials
+
+import (
+ "errors"
+ "fmt"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/auth/credentials/internal/externalaccount"
+ "cloud.google.com/go/auth/credentials/internal/externalaccountuser"
+ "cloud.google.com/go/auth/credentials/internal/gdch"
+ "cloud.google.com/go/auth/credentials/internal/impersonate"
+ internalauth "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/credsfile"
+)
+
// fileCredentials builds an [auth.Credentials] from the raw bytes of any of
// the supported credentials file types: service account, user credentials,
// external account, external account authorized user, impersonated service
// account, and GDCH service account. The resulting token provider is wrapped
// in a cache honoring opts.EarlyTokenRefresh.
func fileCredentials(b []byte, opts *DetectOptions) (*auth.Credentials, error) {
	fileType, err := credsfile.ParseFileType(b)
	if err != nil {
		return nil, err
	}

	// Populated per file type below; not every file type carries a project ID.
	var projectID, universeDomain string
	var tp auth.TokenProvider
	switch fileType {
	case credsfile.ServiceAccountKey:
		f, err := credsfile.ParseServiceAccount(b)
		if err != nil {
			return nil, err
		}
		tp, err = handleServiceAccount(f, opts)
		if err != nil {
			return nil, err
		}
		projectID = f.ProjectID
		universeDomain = resolveUniverseDomain(opts.UniverseDomain, f.UniverseDomain)
	case credsfile.UserCredentialsKey:
		f, err := credsfile.ParseUserCredentials(b)
		if err != nil {
			return nil, err
		}
		tp, err = handleUserCredential(f, opts)
		if err != nil {
			return nil, err
		}
		// NOTE(review): unlike the service-account cases, the opts universe
		// domain override is not applied here — the file value is used as-is.
		universeDomain = f.UniverseDomain
	case credsfile.ExternalAccountKey:
		f, err := credsfile.ParseExternalAccount(b)
		if err != nil {
			return nil, err
		}
		tp, err = handleExternalAccount(f, opts)
		if err != nil {
			return nil, err
		}
		universeDomain = resolveUniverseDomain(opts.UniverseDomain, f.UniverseDomain)
	case credsfile.ExternalAccountAuthorizedUserKey:
		f, err := credsfile.ParseExternalAccountAuthorizedUser(b)
		if err != nil {
			return nil, err
		}
		tp, err = handleExternalAccountAuthorizedUser(f, opts)
		if err != nil {
			return nil, err
		}
		universeDomain = f.UniverseDomain
	case credsfile.ImpersonatedServiceAccountKey:
		f, err := credsfile.ParseImpersonatedServiceAccount(b)
		if err != nil {
			return nil, err
		}
		tp, err = handleImpersonatedServiceAccount(f, opts)
		if err != nil {
			return nil, err
		}
		universeDomain = resolveUniverseDomain(opts.UniverseDomain, f.UniverseDomain)
	case credsfile.GDCHServiceAccountKey:
		f, err := credsfile.ParseGDCHServiceAccount(b)
		if err != nil {
			return nil, err
		}
		tp, err = handleGDCHServiceAccount(f, opts)
		if err != nil {
			return nil, err
		}
		projectID = f.Project
		universeDomain = f.UniverseDomain
	default:
		return nil, fmt.Errorf("credentials: unsupported filetype %q", fileType)
	}
	return auth.NewCredentials(&auth.CredentialsOptions{
		TokenProvider: auth.NewCachedTokenProvider(tp, &auth.CachedTokenProviderOptions{
			ExpireEarly: opts.EarlyTokenRefresh,
		}),
		JSON:              b,
		ProjectIDProvider: internalauth.StaticCredentialsProperty(projectID),
		// TODO(codyoss): only set quota project here if there was a user override
		UniverseDomainProvider: internalauth.StaticCredentialsProperty(universeDomain),
	}), nil
}
+
// resolveUniverseDomain returns optsUniverseDomain if non-empty, in order to
// support configuring universe-specific credentials in code. Auth flows
// unsupported for universe domain should not use this func, but should instead
// simply set the file universe domain on the credentials.
func resolveUniverseDomain(optsUniverseDomain, fileUniverseDomain string) string {
	if optsUniverseDomain == "" {
		return fileUniverseDomain
	}
	return optsUniverseDomain
}
+
// handleServiceAccount builds a token provider from a service account key
// file. Self-signed JWTs are used when requested via opts, or forced for
// non-default universe domains (where token exchange is unavailable);
// otherwise a standard 2LO flow against the token URL is configured.
func handleServiceAccount(f *credsfile.ServiceAccountFile, opts *DetectOptions) (auth.TokenProvider, error) {
	ud := resolveUniverseDomain(opts.UniverseDomain, f.UniverseDomain)
	if opts.UseSelfSignedJWT {
		return configureSelfSignedJWT(f, opts)
	} else if ud != "" && ud != internalauth.DefaultUniverseDomain {
		// For non-GDU universe domains, token exchange is impossible and services
		// must support self-signed JWTs.
		// NOTE(review): this mutates the caller's opts — presumably intentional
		// so the forced mode is visible downstream; confirm.
		opts.UseSelfSignedJWT = true
		return configureSelfSignedJWT(f, opts)
	}
	opts2LO := &auth.Options2LO{
		Email:        f.ClientEmail,
		PrivateKey:   []byte(f.PrivateKey),
		PrivateKeyID: f.PrivateKeyID,
		Scopes:       opts.scopes(),
		TokenURL:     f.TokenURL,
		Subject:      opts.Subject,
		Client:       opts.client(),
		Logger:       opts.logger(),
	}
	if opts2LO.TokenURL == "" {
		// Fall back to Google's default JWT (2LO) token endpoint.
		opts2LO.TokenURL = jwtTokenURL
	}
	return auth.New2LOTokenProvider(opts2LO)
}
+
+func handleUserCredential(f *credsfile.UserCredentialsFile, opts *DetectOptions) (auth.TokenProvider, error) {
+ opts3LO := &auth.Options3LO{
+ ClientID: f.ClientID,
+ ClientSecret: f.ClientSecret,
+ Scopes: opts.scopes(),
+ AuthURL: googleAuthURL,
+ TokenURL: opts.tokenURL(),
+ AuthStyle: auth.StyleInParams,
+ EarlyTokenExpiry: opts.EarlyTokenRefresh,
+ RefreshToken: f.RefreshToken,
+ Client: opts.client(),
+ Logger: opts.logger(),
+ }
+ return auth.New3LOTokenProvider(opts3LO)
+}
+
// handleExternalAccount builds a workload/workforce identity federation token
// provider from an external account file.
func handleExternalAccount(f *credsfile.ExternalAccountFile, opts *DetectOptions) (auth.TokenProvider, error) {
	externalOpts := &externalaccount.Options{
		Audience:                       f.Audience,
		SubjectTokenType:               f.SubjectTokenType,
		TokenURL:                       f.TokenURL,
		TokenInfoURL:                   f.TokenInfoURL,
		ServiceAccountImpersonationURL: f.ServiceAccountImpersonationURL,
		ClientSecret:                   f.ClientSecret,
		ClientID:                       f.ClientID,
		CredentialSource:               f.CredentialSource,
		QuotaProjectID:                 f.QuotaProjectID,
		Scopes:                         opts.scopes(),
		WorkforcePoolUserProject:       f.WorkforcePoolUserProject,
		Client:                         opts.client(),
		Logger:                         opts.logger(),
		// Records whether the HTTP client was defaulted rather than supplied
		// by the caller.
		IsDefaultClient: opts.Client == nil,
	}
	if f.ServiceAccountImpersonation != nil {
		externalOpts.ServiceAccountImpersonationLifetimeSeconds = f.ServiceAccountImpersonation.TokenLifetimeSeconds
	}
	return externalaccount.NewTokenProvider(externalOpts)
}
+
+func handleExternalAccountAuthorizedUser(f *credsfile.ExternalAccountAuthorizedUserFile, opts *DetectOptions) (auth.TokenProvider, error) {
+ externalOpts := &externalaccountuser.Options{
+ Audience: f.Audience,
+ RefreshToken: f.RefreshToken,
+ TokenURL: f.TokenURL,
+ TokenInfoURL: f.TokenInfoURL,
+ ClientID: f.ClientID,
+ ClientSecret: f.ClientSecret,
+ Scopes: opts.scopes(),
+ Client: opts.client(),
+ Logger: opts.logger(),
+ }
+ return externalaccountuser.NewTokenProvider(externalOpts)
+}
+
// handleImpersonatedServiceAccount builds a token provider that exchanges the
// embedded source credentials for an impersonated service account token.
func handleImpersonatedServiceAccount(f *credsfile.ImpersonatedServiceAccountFile, opts *DetectOptions) (auth.TokenProvider, error) {
	if f.ServiceAccountImpersonationURL == "" || f.CredSource == nil {
		return nil, errors.New("missing 'source_credentials' field or 'service_account_impersonation_url' in credentials")
	}

	// The source credentials are themselves a full credentials JSON document.
	tp, err := fileCredentials(f.CredSource, opts)
	if err != nil {
		return nil, err
	}
	return impersonate.NewTokenProvider(&impersonate.Options{
		URL:       f.ServiceAccountImpersonationURL,
		Scopes:    opts.scopes(),
		Tp:        tp,
		Delegates: f.Delegates,
		Client:    opts.client(),
		Logger:    opts.logger(),
	})
}
+
+func handleGDCHServiceAccount(f *credsfile.GDCHServiceAccountFile, opts *DetectOptions) (auth.TokenProvider, error) {
+ return gdch.NewTokenProvider(f, &gdch.Options{
+ STSAudience: opts.STSAudience,
+ Client: opts.client(),
+ Logger: opts.logger(),
+ })
+}
@@ -0,0 +1,531 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package externalaccount
+
+import (
+ "bytes"
+ "context"
+ "crypto/hmac"
+ "crypto/sha256"
+ "encoding/hex"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "net/url"
+ "os"
+ "path"
+ "sort"
+ "strings"
+ "time"
+
+ "cloud.google.com/go/auth/internal"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
+var (
+ // getenv aliases os.Getenv for testing
+ getenv = os.Getenv
+)
+
+const (
+ // AWS Signature Version 4 signing algorithm identifier.
+ awsAlgorithm = "AWS4-HMAC-SHA256"
+
+ // The termination string for the AWS credential scope value as defined in
+ // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
+ awsRequestType = "aws4_request"
+
+ // The AWS authorization header name for the security session token if available.
+ awsSecurityTokenHeader = "x-amz-security-token"
+
+ // The name of the header containing the session token for metadata endpoint calls
+ awsIMDSv2SessionTokenHeader = "X-aws-ec2-metadata-token"
+
+ awsIMDSv2SessionTTLHeader = "X-aws-ec2-metadata-token-ttl-seconds"
+
+ awsIMDSv2SessionTTL = "300"
+
+ // The AWS authorization header name for the auto-generated date.
+ awsDateHeader = "x-amz-date"
+
+ defaultRegionalCredentialVerificationURL = "https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15"
+
+ // Supported AWS configuration environment variables.
+ awsAccessKeyIDEnvVar = "AWS_ACCESS_KEY_ID"
+ awsDefaultRegionEnvVar = "AWS_DEFAULT_REGION"
+ awsRegionEnvVar = "AWS_REGION"
+ awsSecretAccessKeyEnvVar = "AWS_SECRET_ACCESS_KEY"
+ awsSessionTokenEnvVar = "AWS_SESSION_TOKEN"
+
+ awsTimeFormatLong = "20060102T150405Z"
+ awsTimeFormatShort = "20060102"
+ awsProviderType = "aws"
+)
+
// awsSubjectProvider produces GCP STS subject tokens by signing an AWS STS
// GetCallerIdentity request (SigV4) and serializing it (see subjectToken).
type awsSubjectProvider struct {
	EnvironmentID               string
	RegionURL                   string // metadata endpoint used by getRegion
	RegionalCredVerificationURL string // templated STS GetCallerIdentity URL ("{region}" placeholder)
	CredVerificationURL         string // metadata endpoint for role name / security credentials
	IMDSv2SessionTokenURL       string // when set, an IMDSv2 session token is fetched first
	TargetResource              string // workload identity pool provider; sent as x-goog-cloud-target-resource
	requestSigner               *awsRequestSigner
	region                      string
	securityCredentialsProvider AwsSecurityCredentialsProvider // optional programmatic source for region/credentials
	reqOpts                     *RequestOptions

	Client *http.Client
	logger *slog.Logger
}
+
// subjectToken builds the GCP STS subject token: it signs an AWS STS
// GetCallerIdentity request with SigV4 and returns the request (URL, method,
// sorted headers) serialized as URL-escaped JSON.
func (sp *awsSubjectProvider) subjectToken(ctx context.Context) (string, error) {
	// Set Defaults
	if sp.RegionalCredVerificationURL == "" {
		sp.RegionalCredVerificationURL = defaultRegionalCredentialVerificationURL
	}
	headers := make(map[string]string)
	if sp.shouldUseMetadataServer() {
		awsSessionToken, err := sp.getAWSSessionToken(ctx)
		if err != nil {
			return "", err
		}

		if awsSessionToken != "" {
			// Present the IMDSv2 session token on subsequent metadata calls.
			headers[awsIMDSv2SessionTokenHeader] = awsSessionToken
		}
	}

	awsSecurityCredentials, err := sp.getSecurityCredentials(ctx, headers)
	if err != nil {
		return "", err
	}
	if sp.region, err = sp.getRegion(ctx, headers); err != nil {
		return "", err
	}
	sp.requestSigner = &awsRequestSigner{
		RegionName:             sp.region,
		AwsSecurityCredentials: awsSecurityCredentials,
	}

	// Generate the signed request to AWS STS GetCallerIdentity API.
	// Use the required regional endpoint. Otherwise, the request will fail.
	req, err := http.NewRequestWithContext(ctx, "POST", strings.Replace(sp.RegionalCredVerificationURL, "{region}", sp.region, 1), nil)
	if err != nil {
		return "", err
	}
	// The full, canonical resource name of the workload identity pool
	// provider, with or without the HTTPS prefix.
	// Including this header as part of the signature is recommended to
	// ensure data integrity.
	if sp.TargetResource != "" {
		req.Header.Set("x-goog-cloud-target-resource", sp.TargetResource)
	}
	// NOTE(review): signRequest's error return is ignored here — confirm that
	// is intended.
	sp.requestSigner.signRequest(req)

	/*
	   The GCP STS endpoint expects the headers to be formatted as:
	   # [
	   #    {key: 'x-amz-date', value: '...'},
	   #    {key: 'Authorization', value: '...'},
	   #    ...
	   # ]
	   # And then serialized as:
	   # quote(json.dumps({
	   #    url: '...',
	   #    method: 'POST',
	   #    headers: [{key: 'x-amz-date', value: '...'}, ...]
	   # }))
	*/

	awsSignedReq := awsRequest{
		URL:    req.URL.String(),
		Method: "POST",
	}
	// Flatten the header map and sort deterministically (by key, then value)
	// so the serialized token is stable.
	for headerKey, headerList := range req.Header {
		for _, headerValue := range headerList {
			awsSignedReq.Headers = append(awsSignedReq.Headers, awsRequestHeader{
				Key:   headerKey,
				Value: headerValue,
			})
		}
	}
	sort.Slice(awsSignedReq.Headers, func(i, j int) bool {
		headerCompare := strings.Compare(awsSignedReq.Headers[i].Key, awsSignedReq.Headers[j].Key)
		if headerCompare == 0 {
			return strings.Compare(awsSignedReq.Headers[i].Value, awsSignedReq.Headers[j].Value) < 0
		}
		return headerCompare < 0
	})

	result, err := json.Marshal(awsSignedReq)
	if err != nil {
		return "", err
	}
	return url.QueryEscape(string(result)), nil
}
+
+func (sp *awsSubjectProvider) providerType() string {
+ if sp.securityCredentialsProvider != nil {
+ return programmaticProviderType
+ }
+ return awsProviderType
+}
+
// getAWSSessionToken acquires an IMDSv2 session token by issuing a PUT to the
// configured session-token URL with the TTL header set. It returns an empty
// string and no error when no IMDSv2 URL is configured.
func (sp *awsSubjectProvider) getAWSSessionToken(ctx context.Context) (string, error) {
	if sp.IMDSv2SessionTokenURL == "" {
		// No IMDSv2 endpoint configured: no session token required.
		return "", nil
	}
	req, err := http.NewRequestWithContext(ctx, "PUT", sp.IMDSv2SessionTokenURL, nil)
	if err != nil {
		return "", err
	}
	req.Header.Set(awsIMDSv2SessionTTLHeader, awsIMDSv2SessionTTL)

	sp.logger.DebugContext(ctx, "aws session token request", "request", internallog.HTTPRequest(req, nil))
	resp, body, err := internal.DoRequest(sp.Client, req)
	if err != nil {
		return "", err
	}
	sp.logger.DebugContext(ctx, "aws session token response", "response", internallog.HTTPResponse(resp, body))
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("credentials: unable to retrieve AWS session token: %s", body)
	}
	// The response body is the session token itself.
	return string(body), nil
}
+
// getRegion determines the AWS region, consulting in order: a programmatic
// security-credentials provider, the AWS region environment variables, and
// finally the metadata server's region endpoint.
func (sp *awsSubjectProvider) getRegion(ctx context.Context, headers map[string]string) (string, error) {
	if sp.securityCredentialsProvider != nil {
		return sp.securityCredentialsProvider.AwsRegion(ctx, sp.reqOpts)
	}
	if canRetrieveRegionFromEnvironment() {
		// AWS_REGION takes precedence over AWS_DEFAULT_REGION.
		if envAwsRegion := getenv(awsRegionEnvVar); envAwsRegion != "" {
			return envAwsRegion, nil
		}
		return getenv(awsDefaultRegionEnvVar), nil
	}

	if sp.RegionURL == "" {
		return "", errors.New("credentials: unable to determine AWS region")
	}

	req, err := http.NewRequestWithContext(ctx, "GET", sp.RegionURL, nil)
	if err != nil {
		return "", err
	}

	// headers may carry the IMDSv2 session token.
	for name, value := range headers {
		req.Header.Add(name, value)
	}
	sp.logger.DebugContext(ctx, "aws region request", "request", internallog.HTTPRequest(req, nil))
	resp, body, err := internal.DoRequest(sp.Client, req)
	if err != nil {
		return "", err
	}
	sp.logger.DebugContext(ctx, "aws region response", "response", internallog.HTTPResponse(resp, body))
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("credentials: unable to retrieve AWS region - %s", body)
	}

	// This endpoint will return the region in format: us-east-2b.
	// Only the us-east-2 part should be used (the trailing availability-zone
	// letter is dropped).
	bodyLen := len(body)
	if bodyLen == 0 {
		return "", nil
	}
	return string(body[:bodyLen-1]), nil
}
+
// getSecurityCredentials obtains AWS security credentials, trying in order: a
// programmatic provider, the AWS environment variables, and finally the
// metadata server (role-name lookup followed by a credential fetch). The
// named results allow the bare returns on the metadata error paths.
func (sp *awsSubjectProvider) getSecurityCredentials(ctx context.Context, headers map[string]string) (result *AwsSecurityCredentials, err error) {
	if sp.securityCredentialsProvider != nil {
		return sp.securityCredentialsProvider.AwsSecurityCredentials(ctx, sp.reqOpts)
	}
	if canRetrieveSecurityCredentialFromEnvironment() {
		return &AwsSecurityCredentials{
			AccessKeyID:     getenv(awsAccessKeyIDEnvVar),
			SecretAccessKey: getenv(awsSecretAccessKeyEnvVar),
			SessionToken:    getenv(awsSessionTokenEnvVar),
		}, nil
	}

	roleName, err := sp.getMetadataRoleName(ctx, headers)
	if err != nil {
		return
	}
	credentials, err := sp.getMetadataSecurityCredentials(ctx, roleName, headers)
	if err != nil {
		return
	}

	// The metadata response must at least include a key pair.
	if credentials.AccessKeyID == "" {
		return result, errors.New("credentials: missing AccessKeyId credential")
	}
	if credentials.SecretAccessKey == "" {
		return result, errors.New("credentials: missing SecretAccessKey credential")
	}

	return credentials, nil
}
+
// getMetadataSecurityCredentials fetches the security credentials for the
// given role from the metadata server (CredVerificationURL/roleName),
// forwarding any IMDSv2 session headers supplied by the caller.
func (sp *awsSubjectProvider) getMetadataSecurityCredentials(ctx context.Context, roleName string, headers map[string]string) (*AwsSecurityCredentials, error) {
	var result *AwsSecurityCredentials

	req, err := http.NewRequestWithContext(ctx, "GET", fmt.Sprintf("%s/%s", sp.CredVerificationURL, roleName), nil)
	if err != nil {
		return result, err
	}
	for name, value := range headers {
		req.Header.Add(name, value)
	}
	sp.logger.DebugContext(ctx, "aws security credential request", "request", internallog.HTTPRequest(req, nil))
	resp, body, err := internal.DoRequest(sp.Client, req)
	if err != nil {
		return result, err
	}
	sp.logger.DebugContext(ctx, "aws security credential response", "response", internallog.HTTPResponse(resp, body))
	if resp.StatusCode != http.StatusOK {
		return result, fmt.Errorf("credentials: unable to retrieve AWS security credentials - %s", body)
	}
	// The metadata response is JSON; field names map onto AwsSecurityCredentials.
	if err := json.Unmarshal(body, &result); err != nil {
		return nil, err
	}
	return result, nil
}
+
// getMetadataRoleName asks the metadata server for the IAM role attached to
// this instance; the raw response body is the role name.
func (sp *awsSubjectProvider) getMetadataRoleName(ctx context.Context, headers map[string]string) (string, error) {
	if sp.CredVerificationURL == "" {
		return "", errors.New("credentials: unable to determine the AWS metadata server security credentials endpoint")
	}
	req, err := http.NewRequestWithContext(ctx, "GET", sp.CredVerificationURL, nil)
	if err != nil {
		return "", err
	}
	// Forward IMDSv2 session headers, if any.
	for name, value := range headers {
		req.Header.Add(name, value)
	}

	sp.logger.DebugContext(ctx, "aws metadata role request", "request", internallog.HTTPRequest(req, nil))
	resp, body, err := internal.DoRequest(sp.Client, req)
	if err != nil {
		return "", err
	}
	sp.logger.DebugContext(ctx, "aws metadata role response", "response", internallog.HTTPResponse(resp, body))
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("credentials: unable to retrieve AWS role name - %s", body)
	}
	return string(body), nil
}
+
// awsRequestSigner is a utility class to sign http requests using a AWS V4 signature.
type awsRequestSigner struct {
	// RegionName is the AWS region used in the credential scope.
	RegionName string
	// AwsSecurityCredentials holds the key pair (and optional session token)
	// used to derive the signature.
	AwsSecurityCredentials *AwsSecurityCredentials
}
+
// signRequest adds the appropriate headers to an http.Request
// or returns an error if something prevented this.
// On success the caller's req.Header is replaced with the signed header set
// (host, optional security token, date, and Authorization).
func (rs *awsRequestSigner) signRequest(req *http.Request) error {
	// req is assumed non-nil
	// Work on a copy so headers are only published to req on success.
	signedRequest := cloneRequest(req)
	timestamp := Now()
	signedRequest.Header.Set("host", requestHost(req))
	if rs.AwsSecurityCredentials.SessionToken != "" {
		signedRequest.Header.Set(awsSecurityTokenHeader, rs.AwsSecurityCredentials.SessionToken)
	}
	// A caller-supplied "date" header wins; otherwise stamp with the signing time.
	if signedRequest.Header.Get("date") == "" {
		signedRequest.Header.Set(awsDateHeader, timestamp.Format(awsTimeFormatLong))
	}
	authorizationCode, err := rs.generateAuthentication(signedRequest, timestamp)
	if err != nil {
		return err
	}
	signedRequest.Header.Set("Authorization", authorizationCode)
	req.Header = signedRequest.Header
	return nil
}
+
// generateAuthentication builds the AWS SigV4 Authorization header value for
// req at the given signing timestamp: it hashes the canonical request, forms
// the string-to-sign, derives the signing key by chained HMAC-SHA256, and
// formats the final credential/signed-headers/signature line.
func (rs *awsRequestSigner) generateAuthentication(req *http.Request, timestamp time.Time) (string, error) {
	canonicalHeaderColumns, canonicalHeaderData := canonicalHeaders(req)
	dateStamp := timestamp.Format(awsTimeFormatShort)
	serviceName := ""

	// The service name is taken from the first label of the host
	// (e.g. "sts" from "sts.us-east-1.amazonaws.com").
	if splitHost := strings.Split(requestHost(req), "."); len(splitHost) > 0 {
		serviceName = splitHost[0]
	}
	credentialScope := strings.Join([]string{dateStamp, rs.RegionName, serviceName, awsRequestType}, "/")
	requestString, err := canonicalRequest(req, canonicalHeaderColumns, canonicalHeaderData)
	if err != nil {
		return "", err
	}
	requestHash, err := getSha256([]byte(requestString))
	if err != nil {
		return "", err
	}

	stringToSign := strings.Join([]string{awsAlgorithm, timestamp.Format(awsTimeFormatLong), credentialScope, requestHash}, "\n")
	// SigV4 key derivation: HMAC over date, region, service, request type,
	// and finally the string-to-sign, each step keyed by the previous output.
	signingKey := []byte("AWS4" + rs.AwsSecurityCredentials.SecretAccessKey)
	for _, signingInput := range []string{
		dateStamp, rs.RegionName, serviceName, awsRequestType, stringToSign,
	} {
		signingKey, err = getHmacSha256(signingKey, []byte(signingInput))
		if err != nil {
			return "", err
		}
	}

	return fmt.Sprintf("%s Credential=%s/%s, SignedHeaders=%s, Signature=%s", awsAlgorithm, rs.AwsSecurityCredentials.AccessKeyID, credentialScope, canonicalHeaderColumns, hex.EncodeToString(signingKey)), nil
}
+
// getSha256 returns the lowercase hex encoding of the SHA-256 digest of input.
// The error return is always nil; it is kept so callers can treat all hash
// helpers uniformly.
func getSha256(input []byte) (string, error) {
	digest := sha256.Sum256(input)
	return hex.EncodeToString(digest[:]), nil
}
+
+func getHmacSha256(key, input []byte) ([]byte, error) {
+ hash := hmac.New(sha256.New, key)
+ if _, err := hash.Write(input); err != nil {
+ return nil, err
+ }
+ return hash.Sum(nil), nil
+}
+
// cloneRequest returns a shallow copy of r with a deep copy of its headers,
// so the caller can mutate the clone's header map without affecting r.
func cloneRequest(r *http.Request) *http.Request {
	r2 := new(http.Request)
	*r2 = *r
	if r.Header != nil {
		r2.Header = make(http.Header, len(r.Header))

		// Find total number of values.
		headerCount := 0
		for _, headerValues := range r.Header {
			headerCount += len(headerValues)
		}
		copiedHeaders := make([]string, headerCount) // shared backing array for headers' values

		// Each key's values are a capacity-limited sub-slice of the shared
		// array (s[:n:n]), so one allocation serves all keys and a later
		// append on one slice cannot stomp a sibling's values.
		for headerKey, headerValues := range r.Header {
			headerCount = copy(copiedHeaders, headerValues)
			r2.Header[headerKey] = copiedHeaders[:headerCount:headerCount]
			copiedHeaders = copiedHeaders[headerCount:]
		}
	}
	return r2
}
+
+func canonicalPath(req *http.Request) string {
+ result := req.URL.EscapedPath()
+ if result == "" {
+ return "/"
+ }
+ return path.Clean(result)
+}
+
+func canonicalQuery(req *http.Request) string {
+ queryValues := req.URL.Query()
+ for queryKey := range queryValues {
+ sort.Strings(queryValues[queryKey])
+ }
+ return queryValues.Encode()
+}
+
+func canonicalHeaders(req *http.Request) (string, string) {
+ // Header keys need to be sorted alphabetically.
+ var headers []string
+ lowerCaseHeaders := make(http.Header)
+ for k, v := range req.Header {
+ k := strings.ToLower(k)
+ if _, ok := lowerCaseHeaders[k]; ok {
+ // include additional values
+ lowerCaseHeaders[k] = append(lowerCaseHeaders[k], v...)
+ } else {
+ headers = append(headers, k)
+ lowerCaseHeaders[k] = v
+ }
+ }
+ sort.Strings(headers)
+
+ var fullHeaders bytes.Buffer
+ for _, header := range headers {
+ headerValue := strings.Join(lowerCaseHeaders[header], ",")
+ fullHeaders.WriteString(header)
+ fullHeaders.WriteRune(':')
+ fullHeaders.WriteString(headerValue)
+ fullHeaders.WriteRune('\n')
+ }
+
+ return strings.Join(headers, ";"), fullHeaders.String()
+}
+
// requestDataHash returns the hex SHA-256 of the request body, or of the
// empty payload when the request has no body. It reads a fresh copy via
// GetBody so the original body stream is left intact for sending.
func requestDataHash(req *http.Request) (string, error) {
	var requestData []byte
	if req.Body != nil {
		requestBody, err := req.GetBody()
		if err != nil {
			return "", err
		}
		defer requestBody.Close()

		requestData, err = internal.ReadAll(requestBody)
		if err != nil {
			return "", err
		}
	}

	// A nil requestData hashes to the well-known empty-payload digest.
	return getSha256(requestData)
}
+
+func requestHost(req *http.Request) string {
+ if req.Host != "" {
+ return req.Host
+ }
+ return req.URL.Host
+}
+
// canonicalRequest assembles the SigV4 canonical request string: method,
// canonical path, canonical query, canonical header data, signed-header
// column list, and the payload hash, joined by newlines.
func canonicalRequest(req *http.Request, canonicalHeaderColumns, canonicalHeaderData string) (string, error) {
	dataHash, err := requestDataHash(req)
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("%s\n%s\n%s\n%s\n%s\n%s", req.Method, canonicalPath(req), canonicalQuery(req), canonicalHeaderData, canonicalHeaderColumns, dataHash), nil
}
+
// awsRequestHeader is one serialized header of the signed GetCallerIdentity
// request embedded in the subject token.
type awsRequestHeader struct {
	Key   string `json:"key"`
	Value string `json:"value"`
}

// awsRequest is the JSON form of the signed AWS request that is sent to STS
// as the subject token.
type awsRequest struct {
	URL     string             `json:"url"`
	Method  string             `json:"method"`
	Headers []awsRequestHeader `json:"headers"`
}
+
// The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. Only one is
// required.
func canRetrieveRegionFromEnvironment() bool {
	// getenv is the package-level environment accessor (overridable in tests).
	return getenv(awsRegionEnvVar) != "" || getenv(awsDefaultRegionEnvVar) != ""
}
+
// Check if both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are available.
// The session token is intentionally not required here; it is optional.
func canRetrieveSecurityCredentialFromEnvironment() bool {
	return getenv(awsAccessKeyIDEnvVar) != "" && getenv(awsSecretAccessKeyEnvVar) != ""
}
+
// shouldUseMetadataServer reports whether the EC2 metadata server must be
// consulted: no custom provider is set and the environment is missing either
// the region or the security credentials.
func (sp *awsSubjectProvider) shouldUseMetadataServer() bool {
	return sp.securityCredentialsProvider == nil && (!canRetrieveRegionFromEnvironment() || !canRetrieveSecurityCredentialFromEnvironment())
}
@@ -0,0 +1,284 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package externalaccount
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "os"
+ "os/exec"
+ "regexp"
+ "strings"
+ "time"
+
+ "cloud.google.com/go/auth/internal"
+)
+
const (
	// executableSupportedMaxVersion is the highest "version" value accepted
	// in an executable response.
	executableSupportedMaxVersion = 1
	// executableDefaultTimeout applies when timeout_millis is not configured.
	executableDefaultTimeout = 30 * time.Second
	// executableSource / outputFileSource label where a response came from
	// in error messages.
	executableSource     = "response"
	executableProviderType = "executable"
	outputFileSource     = "output file"

	// allowExecutablesEnvVar must be set to "1" before any executable is run.
	allowExecutablesEnvVar = "GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES"

	// OAuth token-type URNs accepted in executable responses.
	jwtTokenType   = "urn:ietf:params:oauth:token-type:jwt"
	idTokenType    = "urn:ietf:params:oauth:token-type:id_token"
	saml2TokenType = "urn:ietf:params:oauth:token-type:saml2"
)
+
var (
	// serviceAccountImpersonationRE extracts the impersonated service account
	// email from a generateAccessToken impersonation URL.
	serviceAccountImpersonationRE = regexp.MustCompile(`https://iamcredentials..+/v1/projects/-/serviceAccounts/(.*@.*):generateAccessToken`)
)
+
// nonCacheableError marks a failure that must not be served from the output
// file cache (expired token, user-reported failure); callers detect it by
// type assertion and re-run the executable.
type nonCacheableError struct {
	message string
}

// Error implements the error interface.
func (nce nonCacheableError) Error() string {
	return nce.message
}
+
// environment is a contract for testing: it abstracts the process
// environment, clock, and subprocess execution so tests can substitute fakes.
type environment interface {
	existingEnv() []string
	getenv(string) string
	run(ctx context.Context, command string, env []string) ([]byte, error)
	now() time.Time
}

// runtimeEnvironment is the production environment backed by the os package
// and the real clock.
type runtimeEnvironment struct{}

func (r runtimeEnvironment) existingEnv() []string {
	return os.Environ()
}
func (r runtimeEnvironment) getenv(key string) string {
	return os.Getenv(key)
}

// now returns the current time in UTC.
func (r runtimeEnvironment) now() time.Time {
	return time.Now().UTC()
}
+
+func (r runtimeEnvironment) run(ctx context.Context, command string, env []string) ([]byte, error) {
+ splitCommand := strings.Fields(command)
+ cmd := exec.CommandContext(ctx, splitCommand[0], splitCommand[1:]...)
+ cmd.Env = env
+
+ var stdout, stderr bytes.Buffer
+ cmd.Stdout = &stdout
+ cmd.Stderr = &stderr
+
+ if err := cmd.Run(); err != nil {
+ if ctx.Err() == context.DeadlineExceeded {
+ return nil, context.DeadlineExceeded
+ }
+ if exitError, ok := err.(*exec.ExitError); ok {
+ return nil, exitCodeError(exitError)
+ }
+ return nil, executableError(err)
+ }
+
+ bytesStdout := bytes.TrimSpace(stdout.Bytes())
+ if len(bytesStdout) > 0 {
+ return bytesStdout, nil
+ }
+ return bytes.TrimSpace(stderr.Bytes()), nil
+}
+
// executableSubjectProvider obtains a subject token by running a
// user-configured executable (optionally consulting a cached output file
// first).
type executableSubjectProvider struct {
	Command    string
	Timeout    time.Duration
	OutputFile string
	client     *http.Client
	opts       *Options
	env        environment
}

// executableResponse is the JSON document the executable (or its output
// file) must produce.
type executableResponse struct {
	Version        int    `json:"version,omitempty"`
	Success        *bool  `json:"success,omitempty"`
	TokenType      string `json:"token_type,omitempty"`
	ExpirationTime int64  `json:"expiration_time,omitempty"`
	IDToken        string `json:"id_token,omitempty"`
	SamlResponse   string `json:"saml_response,omitempty"`
	Code           string `json:"code,omitempty"`
	Message        string `json:"message,omitempty"`
}
+
// parseSubjectTokenFromSource validates an executable response (from the
// executable itself or its output file, identified by source for error
// messages) and extracts the subject token. now is a Unix timestamp used
// for the expiry check. Validation order matters: failure responses are
// rejected before the version/expiry/token checks so user-reported errors
// surface first.
func (sp *executableSubjectProvider) parseSubjectTokenFromSource(response []byte, source string, now int64) (string, error) {
	var result executableResponse
	if err := json.Unmarshal(response, &result); err != nil {
		return "", jsonParsingError(source, string(response))
	}
	// Validate
	if result.Version == 0 {
		return "", missingFieldError(source, "version")
	}
	if result.Success == nil {
		return "", missingFieldError(source, "success")
	}
	if !*result.Success {
		// Failure responses must carry both a code and a message.
		if result.Code == "" || result.Message == "" {
			return "", malformedFailureError()
		}
		return "", userDefinedError(result.Code, result.Message)
	}
	if result.Version > executableSupportedMaxVersion || result.Version < 0 {
		return "", unsupportedVersionError(source, result.Version)
	}
	// expiration_time is mandatory only when an output file is in use,
	// since cached responses must be expirable.
	if result.ExpirationTime == 0 && sp.OutputFile != "" {
		return "", missingFieldError(source, "expiration_time")
	}
	if result.TokenType == "" {
		return "", missingFieldError(source, "token_type")
	}
	if result.ExpirationTime != 0 && result.ExpirationTime < now {
		return "", tokenExpiredError()
	}

	switch result.TokenType {
	case jwtTokenType, idTokenType:
		if result.IDToken == "" {
			return "", missingFieldError(source, "id_token")
		}
		return result.IDToken, nil
	case saml2TokenType:
		if result.SamlResponse == "" {
			return "", missingFieldError(source, "saml_response")
		}
		return result.SamlResponse, nil
	default:
		return "", tokenTypeError(source)
	}
}
+
// subjectToken returns a cached token from the output file when one is
// available and valid; otherwise it runs the configured executable.
func (sp *executableSubjectProvider) subjectToken(ctx context.Context) (string, error) {
	if token, err := sp.getTokenFromOutputFile(); token != "" || err != nil {
		return token, err
	}
	return sp.getTokenFromExecutableCommand(ctx)
}
+
// providerType identifies this provider ("executable") in metrics headers.
func (sp *executableSubjectProvider) providerType() string {
	return executableProviderType
}
+
// getTokenFromOutputFile attempts to read a previously cached subject token
// from the configured output file. It returns ("", nil) — meaning "run the
// executable" — when no file is configured, the file is absent or empty, or
// the cached entry is expired/unsuccessful; it returns a non-nil error only
// for problems the developer should see.
func (sp *executableSubjectProvider) getTokenFromOutputFile() (token string, err error) {
	if sp.OutputFile == "" {
		// This ExecutableCredentialSource doesn't use an OutputFile.
		return "", nil
	}

	file, err := os.Open(sp.OutputFile)
	if err != nil {
		// No OutputFile found. Hasn't been created yet, so skip it.
		return "", nil
	}
	defer file.Close()

	data, err := internal.ReadAll(file)
	if err != nil || len(data) == 0 {
		// Cachefile exists, but no data found. Get new credential.
		return "", nil
	}

	token, err = sp.parseSubjectTokenFromSource(data, outputFileSource, sp.env.now().Unix())
	if err != nil {
		if _, ok := err.(nonCacheableError); ok {
			// If the cached token is expired we need a new token,
			// and if the cache contains a failure, we need to try again.
			return "", nil
		}

		// There was an error in the cached token, and the developer should be aware of it.
		return "", err
	}
	// Token parsing succeeded. Use found token.
	return token, nil
}
+
// executableEnvironment builds the child process environment: the current
// process environment plus the GOOGLE_EXTERNAL_ACCOUNT_* variables that tell
// the executable what credential is being requested.
func (sp *executableSubjectProvider) executableEnvironment() []string {
	result := sp.env.existingEnv()
	result = append(result, fmt.Sprintf("GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE=%v", sp.opts.Audience))
	result = append(result, fmt.Sprintf("GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE=%v", sp.opts.SubjectTokenType))
	result = append(result, "GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE=0")
	if sp.opts.ServiceAccountImpersonationURL != "" {
		// Surface the impersonated service account email when the URL matches
		// the expected generateAccessToken form.
		matches := serviceAccountImpersonationRE.FindStringSubmatch(sp.opts.ServiceAccountImpersonationURL)
		if matches != nil {
			result = append(result, fmt.Sprintf("GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL=%v", matches[1]))
		}
	}
	if sp.OutputFile != "" {
		result = append(result, fmt.Sprintf("GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE=%v", sp.OutputFile))
	}
	return result
}
+
// getTokenFromExecutableCommand runs the configured executable under the
// provider's timeout and parses its output into a subject token. Execution
// is refused unless the user has opted in via the allow-executables
// environment variable.
func (sp *executableSubjectProvider) getTokenFromExecutableCommand(ctx context.Context) (string, error) {
	// For security reasons, we need our consumers to set this environment variable to allow executables to be run.
	if sp.env.getenv(allowExecutablesEnvVar) != "1" {
		return "", errors.New("credentials: executables need to be explicitly allowed (set GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES to '1') to run")
	}

	ctx, cancel := context.WithDeadline(ctx, sp.env.now().Add(sp.Timeout))
	defer cancel()

	output, err := sp.env.run(ctx, sp.Command, sp.executableEnvironment())
	if err != nil {
		return "", err
	}
	return sp.parseSubjectTokenFromSource(output, executableSource, sp.env.now().Unix())
}
+
// missingFieldError reports a required field absent from a response.
func missingFieldError(source, field string) error {
	return fmt.Errorf("credentials: %q missing %q field", source, field)
}

// jsonParsingError reports a response that is not valid JSON.
func jsonParsingError(source, data string) error {
	return fmt.Errorf("credentials: unable to parse %q: %v", source, data)
}

// malformedFailureError reports an unsuccessful response missing its
// code/message; non-cacheable so the executable is retried.
func malformedFailureError() error {
	return nonCacheableError{"credentials: response must include `error` and `message` fields when unsuccessful"}
}

// userDefinedError surfaces the executable's own failure code and message;
// non-cacheable so the executable is retried.
func userDefinedError(code, message string) error {
	return nonCacheableError{fmt.Sprintf("credentials: response contains unsuccessful response: (%v) %v", code, message)}
}

// unsupportedVersionError reports a response version this build cannot handle.
func unsupportedVersionError(source string, version int) error {
	return fmt.Errorf("credentials: %v contains unsupported version: %v", source, version)
}

// tokenExpiredError reports an expired cached token; non-cacheable so a
// fresh token is fetched.
func tokenExpiredError() error {
	return nonCacheableError{"credentials: the token returned by the executable is expired"}
}

// tokenTypeError reports an unrecognized token_type value.
func tokenTypeError(source string) error {
	return fmt.Errorf("credentials: %v contains unsupported token type", source)
}

// exitCodeError wraps a non-zero executable exit, preserving the exit code.
func exitCodeError(err *exec.ExitError) error {
	return fmt.Errorf("credentials: executable command failed with exit code %v: %w", err.ExitCode(), err)
}

// executableError wraps any other failure to run the executable.
func executableError(err error) error {
	return fmt.Errorf("credentials: executable command failed: %w", err)
}
@@ -0,0 +1,428 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package externalaccount
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "regexp"
+ "strconv"
+ "strings"
+ "time"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/auth/credentials/internal/impersonate"
+ "cloud.google.com/go/auth/credentials/internal/stsexchange"
+ "cloud.google.com/go/auth/internal/credsfile"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
const (
	// timeoutMinimum/timeoutMaximum bound the configurable executable timeout.
	timeoutMinimum = 5 * time.Second
	timeoutMaximum = 120 * time.Second

	// universeDomainPlaceholder in defaultTokenURL is replaced with the
	// configured (or default) universe domain.
	universeDomainPlaceholder = "UNIVERSE_DOMAIN"
	defaultTokenURL           = "https://sts.UNIVERSE_DOMAIN/v1/token"
	defaultUniverseDomain     = "googleapis.com"
)
+
var (
	// Now aliases time.Now for testing
	Now = func() time.Time {
		return time.Now().UTC()
	}
	// validWorkforceAudiencePattern matches audiences that identify a
	// workforce pool (as opposed to a workload identity pool).
	validWorkforceAudiencePattern *regexp.Regexp = regexp.MustCompile(`//iam\.googleapis\.com/locations/[^/]+/workforcePools/`)
)
+
// Options stores the configuration for fetching tokens with external credentials.
type Options struct {
	// Audience is the Secure Token Service (STS) audience which contains the resource name for the workload
	// identity pool or the workforce pool and the provider identifier in that pool.
	Audience string
	// SubjectTokenType is the STS token type based on the Oauth2.0 token exchange spec
	// e.g. `urn:ietf:params:oauth:token-type:jwt`.
	SubjectTokenType string
	// TokenURL is the STS token exchange endpoint.
	TokenURL string
	// TokenInfoURL is the token_info endpoint used to retrieve the account related information (
	// user attributes like account identifier, eg. email, username, uid, etc). This is
	// needed for gCloud session account identification.
	TokenInfoURL string
	// ServiceAccountImpersonationURL is the URL for the service account impersonation request. This is only
	// required for workload identity pools when APIs to be accessed have not integrated with UberMint.
	ServiceAccountImpersonationURL string
	// ServiceAccountImpersonationLifetimeSeconds is the number of seconds the service account impersonation
	// token will be valid for.
	ServiceAccountImpersonationLifetimeSeconds int
	// ClientSecret is currently only required if token_info endpoint also
	// needs to be called with the generated GCP access token. When provided, STS will be
	// called with additional basic authentication using client_id as username and client_secret as password.
	ClientSecret string
	// ClientID is only required in conjunction with ClientSecret, as described above.
	ClientID string
	// CredentialSource contains the necessary information to retrieve the token itself, as well
	// as some environmental information.
	CredentialSource *credsfile.CredentialSource
	// QuotaProjectID is injected by gCloud. If the value is non-empty, the Auth libraries
	// will set the x-goog-user-project which overrides the project associated with the credentials.
	QuotaProjectID string
	// Scopes contains the desired scopes for the returned access token.
	Scopes []string
	// WorkforcePoolUserProject should be set when it is a workforce pool and
	// not a workload identity pool. The underlying principal must still have
	// serviceusage.services.use IAM permission to use the project for
	// billing/quota. Optional.
	WorkforcePoolUserProject string
	// UniverseDomain is the default service domain for a given Cloud universe.
	// This value will be used in the default STS token URL. The default value
	// is "googleapis.com". It will not be used if TokenURL is set. Optional.
	UniverseDomain string
	// SubjectTokenProvider is an optional token provider for OIDC/SAML
	// credentials. One of SubjectTokenProvider, AWSSecurityCredentialProvider
	// or CredentialSource must be provided. Optional.
	SubjectTokenProvider SubjectTokenProvider
	// AwsSecurityCredentialsProvider is an AWS Security Credential provider
	// for AWS credentials. One of SubjectTokenProvider,
	// AWSSecurityCredentialProvider or CredentialSource must be provided. Optional.
	AwsSecurityCredentialsProvider AwsSecurityCredentialsProvider
	// Client for token request.
	Client *http.Client
	// IsDefaultClient marks whether the client passed in is a default client that can be overridden.
	// This is important for X509 credentials which should create a new client if the default was used
	// but should respect a client explicitly passed in by the user.
	IsDefaultClient bool
	// Logger is used for debug logging. If provided, logging will be enabled
	// at the loggers configured level. By default logging is disabled unless
	// enabled by setting GOOGLE_SDK_GO_LOGGING_LEVEL in which case a default
	// logger will be used. Optional.
	Logger *slog.Logger
}
+
// SubjectTokenProvider can be used to supply a subject token to exchange for a
// GCP access token.
type SubjectTokenProvider interface {
	// SubjectToken should return a valid subject token or an error.
	// The external account token provider does not cache the returned subject
	// token, so caching logic should be implemented in the provider to prevent
	// multiple requests for the same subject token.
	// opts describes the audience and token type being requested.
	SubjectToken(ctx context.Context, opts *RequestOptions) (string, error)
}
+
// RequestOptions contains information about the requested subject token or AWS
// security credentials from the Google external account credential.
type RequestOptions struct {
	// Audience is the requested audience for the external account credential.
	Audience string
	// Subject token type is the requested subject token type for the external
	// account credential. Expected values include:
	// "urn:ietf:params:oauth:token-type:jwt"
	// "urn:ietf:params:oauth:token-type:id_token"
	// "urn:ietf:params:oauth:token-type:saml2"
	// "urn:ietf:params:aws:token-type:aws4_request"
	SubjectTokenType string
}
+
// AwsSecurityCredentialsProvider can be used to supply AwsSecurityCredentials
// and an AWS Region to exchange for a GCP access token.
type AwsSecurityCredentialsProvider interface {
	// AwsRegion should return the AWS region or an error.
	AwsRegion(ctx context.Context, opts *RequestOptions) (string, error)
	// GetAwsSecurityCredentials should return a valid set of
	// AwsSecurityCredentials or an error. The external account token provider
	// does not cache the returned security credentials, so caching logic should
	// be implemented in the provider to prevent multiple requests for the
	// same security credentials.
	AwsSecurityCredentials(ctx context.Context, opts *RequestOptions) (*AwsSecurityCredentials, error)
}
+
// AwsSecurityCredentials models AWS security credentials.
// The JSON tags match the field names used by the EC2 metadata server's
// security-credentials response.
type AwsSecurityCredentials struct {
	// AccessKeyId is the AWS Access Key ID - Required.
	AccessKeyID string `json:"AccessKeyID"`
	// SecretAccessKey is the AWS Secret Access Key - Required.
	SecretAccessKey string `json:"SecretAccessKey"`
	// SessionToken is the AWS Session token. This should be provided for
	// temporary AWS security credentials - Optional.
	SessionToken string `json:"Token"`
}
+
+func (o *Options) validate() error {
+ if o.Audience == "" {
+ return fmt.Errorf("externalaccount: Audience must be set")
+ }
+ if o.SubjectTokenType == "" {
+ return fmt.Errorf("externalaccount: Subject token type must be set")
+ }
+ if o.WorkforcePoolUserProject != "" {
+ if valid := validWorkforceAudiencePattern.MatchString(o.Audience); !valid {
+ return fmt.Errorf("externalaccount: workforce_pool_user_project should not be set for non-workforce pool credentials")
+ }
+ }
+ count := 0
+ if o.CredentialSource != nil {
+ count++
+ }
+ if o.SubjectTokenProvider != nil {
+ count++
+ }
+ if o.AwsSecurityCredentialsProvider != nil {
+ count++
+ }
+ if count == 0 {
+ return fmt.Errorf("externalaccount: one of CredentialSource, SubjectTokenProvider, or AwsSecurityCredentialsProvider must be set")
+ }
+ if count > 1 {
+ return fmt.Errorf("externalaccount: only one of CredentialSource, SubjectTokenProvider, or AwsSecurityCredentialsProvider must be set")
+ }
+ return nil
+}
+
// client returns the http client that should be used for the token exchange. If a non-default client
// is provided, then the client configured in the options will always be returned. If a default client
// is provided and the options are configured for X509 credentials, a new client will be created.
func (o *Options) client() (*http.Client, error) {
	// If a client was provided and no override certificate config location was provided, use the provided client.
	if o.CredentialSource == nil || o.CredentialSource.Certificate == nil || (!o.IsDefaultClient && o.CredentialSource.Certificate.CertificateConfigLocation == "") {
		return o.Client, nil
	}

	// If a new client should be created, validate and use the certificate source to create a new mTLS client.
	// Exactly one of use_default_certificate_config and an explicit location
	// must be in effect.
	cert := o.CredentialSource.Certificate
	if !cert.UseDefaultCertificateConfig && cert.CertificateConfigLocation == "" {
		return nil, errors.New("credentials: \"certificate\" object must either specify a certificate_config_location or use_default_certificate_config should be true")
	}
	if cert.UseDefaultCertificateConfig && cert.CertificateConfigLocation != "" {
		return nil, errors.New("credentials: \"certificate\" object cannot specify both a certificate_config_location and use_default_certificate_config=true")
	}
	return createX509Client(cert.CertificateConfigLocation)
}
+
+// resolveTokenURL sets the default STS token endpoint with the configured
+// universe domain.
+func (o *Options) resolveTokenURL() {
+ if o.TokenURL != "" {
+ return
+ } else if o.UniverseDomain != "" {
+ o.TokenURL = strings.Replace(defaultTokenURL, universeDomainPlaceholder, o.UniverseDomain, 1)
+ } else {
+ o.TokenURL = strings.Replace(defaultTokenURL, universeDomainPlaceholder, defaultUniverseDomain, 1)
+ }
+}
+
// NewTokenProvider returns a [cloud.google.com/go/auth.TokenProvider]
// configured with the provided options.
// The provider validates the options, resolves the STS endpoint, builds a
// subject-token provider, and — when impersonation is configured — wraps the
// base provider in an impersonating one. Results are cached in both cases.
func NewTokenProvider(opts *Options) (auth.TokenProvider, error) {
	if err := opts.validate(); err != nil {
		return nil, err
	}
	opts.resolveTokenURL()
	logger := internallog.New(opts.Logger)
	stp, err := newSubjectTokenProvider(opts)
	if err != nil {
		return nil, err
	}

	client, err := opts.client()
	if err != nil {
		return nil, err
	}

	tp := &tokenProvider{
		client: client,
		opts:   opts,
		stp:    stp,
		logger: logger,
	}

	if opts.ServiceAccountImpersonationURL == "" {
		return auth.NewCachedTokenProvider(tp, nil), nil
	}

	// Preserve the caller's scopes for the impersonation request before
	// overriding the base exchange with cloud-platform.
	scopes := make([]string, len(opts.Scopes))
	copy(scopes, opts.Scopes)
	// needed for impersonation
	tp.opts.Scopes = []string{"https://www.googleapis.com/auth/cloud-platform"}
	imp, err := impersonate.NewTokenProvider(&impersonate.Options{
		Client:               client,
		URL:                  opts.ServiceAccountImpersonationURL,
		Scopes:               scopes,
		Tp:                   auth.NewCachedTokenProvider(tp, nil),
		TokenLifetimeSeconds: opts.ServiceAccountImpersonationLifetimeSeconds,
		Logger:               logger,
	})
	if err != nil {
		return nil, err
	}
	return auth.NewCachedTokenProvider(imp, nil), nil
}
+
// subjectTokenProvider retrieves the third-party token that is exchanged at
// STS; providerType identifies the source for metrics headers.
type subjectTokenProvider interface {
	subjectToken(ctx context.Context) (string, error)
	providerType() string
}

// tokenProvider is the provider that handles external credentials. It is used to retrieve Tokens.
type tokenProvider struct {
	client *http.Client
	logger *slog.Logger
	opts   *Options
	stp    subjectTokenProvider
}
+
// Token obtains a subject token from the configured provider and exchanges it
// at the STS endpoint for a GCP access token, implementing auth.TokenProvider.
func (tp *tokenProvider) Token(ctx context.Context) (*auth.Token, error) {
	subjectToken, err := tp.stp.subjectToken(ctx)
	if err != nil {
		return nil, err
	}

	stsRequest := &stsexchange.TokenRequest{
		GrantType:          stsexchange.GrantType,
		Audience:           tp.opts.Audience,
		Scope:              tp.opts.Scopes,
		RequestedTokenType: stsexchange.TokenType,
		SubjectToken:       subjectToken,
		SubjectTokenType:   tp.opts.SubjectTokenType,
	}
	header := make(http.Header)
	header.Set("Content-Type", "application/x-www-form-urlencoded")
	header.Add("x-goog-api-client", getGoogHeaderValue(tp.opts, tp.stp))
	clientAuth := stsexchange.ClientAuthentication{
		AuthStyle:    auth.StyleInHeader,
		ClientID:     tp.opts.ClientID,
		ClientSecret: tp.opts.ClientSecret,
	}
	var options map[string]interface{}
	// Do not pass workforce_pool_user_project when client authentication is used.
	// The client ID is sufficient for determining the user project.
	if tp.opts.WorkforcePoolUserProject != "" && tp.opts.ClientID == "" {
		options = map[string]interface{}{
			"userProject": tp.opts.WorkforcePoolUserProject,
		}
	}
	stsResp, err := stsexchange.ExchangeToken(ctx, &stsexchange.Options{
		Client:         tp.client,
		Endpoint:       tp.opts.TokenURL,
		Request:        stsRequest,
		Authentication: clientAuth,
		Headers:        header,
		ExtraOpts:      options,
		Logger:         tp.logger,
	})
	if err != nil {
		return nil, err
	}

	tok := &auth.Token{
		Value: stsResp.AccessToken,
		Type:  stsResp.TokenType,
	}
	// The RFC8693 doesn't define the explicit 0 of "expires_in" field behavior.
	// Treat zero or negative expiry as an invalid STS response.
	if stsResp.ExpiresIn <= 0 {
		return nil, fmt.Errorf("credentials: got invalid expiry from security token service")
	}
	tok.Expiry = Now().Add(time.Duration(stsResp.ExpiresIn) * time.Second)
	return tok, nil
}
+
// newSubjectTokenProvider determines the type of credsfile.CredentialSource needed to create a
// subjectTokenProvider
// Precedence: AWS provider, programmatic provider, then the credential
// source's aws/file/url/executable/certificate variants.
// NOTE(review): when all three sources are nil this dereferences
// o.CredentialSource; callers are expected to have run Options.validate first.
func newSubjectTokenProvider(o *Options) (subjectTokenProvider, error) {
	logger := internallog.New(o.Logger)
	reqOpts := &RequestOptions{Audience: o.Audience, SubjectTokenType: o.SubjectTokenType}
	if o.AwsSecurityCredentialsProvider != nil {
		return &awsSubjectProvider{
			securityCredentialsProvider: o.AwsSecurityCredentialsProvider,
			TargetResource:              o.Audience,
			reqOpts:                     reqOpts,
			logger:                      logger,
		}, nil
	} else if o.SubjectTokenProvider != nil {
		return &programmaticProvider{stp: o.SubjectTokenProvider, opts: reqOpts}, nil
	} else if len(o.CredentialSource.EnvironmentID) > 3 && o.CredentialSource.EnvironmentID[:3] == "aws" {
		// EnvironmentID has the form "aws<version>", e.g. "aws1".
		if awsVersion, err := strconv.Atoi(o.CredentialSource.EnvironmentID[3:]); err == nil {
			if awsVersion != 1 {
				return nil, fmt.Errorf("credentials: aws version '%d' is not supported in the current build", awsVersion)
			}

			awsProvider := &awsSubjectProvider{
				EnvironmentID:               o.CredentialSource.EnvironmentID,
				RegionURL:                   o.CredentialSource.RegionURL,
				RegionalCredVerificationURL: o.CredentialSource.RegionalCredVerificationURL,
				CredVerificationURL:         o.CredentialSource.URL,
				TargetResource:              o.Audience,
				Client:                      o.Client,
				logger:                      logger,
			}
			if o.CredentialSource.IMDSv2SessionTokenURL != "" {
				awsProvider.IMDSv2SessionTokenURL = o.CredentialSource.IMDSv2SessionTokenURL
			}

			return awsProvider, nil
		}
	} else if o.CredentialSource.File != "" {
		return &fileSubjectProvider{File: o.CredentialSource.File, Format: o.CredentialSource.Format}, nil
	} else if o.CredentialSource.URL != "" {
		return &urlSubjectProvider{
			URL:     o.CredentialSource.URL,
			Headers: o.CredentialSource.Headers,
			Format:  o.CredentialSource.Format,
			Client:  o.Client,
			Logger:  logger,
		}, nil
	} else if o.CredentialSource.Executable != nil {
		ec := o.CredentialSource.Executable
		if ec.Command == "" {
			return nil, errors.New("credentials: missing `command` field — executable command must be provided")
		}

		execProvider := &executableSubjectProvider{}
		execProvider.Command = ec.Command
		if ec.TimeoutMillis == 0 {
			execProvider.Timeout = executableDefaultTimeout
		} else {
			execProvider.Timeout = time.Duration(ec.TimeoutMillis) * time.Millisecond
			if execProvider.Timeout < timeoutMinimum || execProvider.Timeout > timeoutMaximum {
				return nil, fmt.Errorf("credentials: invalid `timeout_millis` field — executable timeout must be between %v and %v seconds", timeoutMinimum.Seconds(), timeoutMaximum.Seconds())
			}
		}
		execProvider.OutputFile = ec.OutputFile
		execProvider.client = o.Client
		execProvider.opts = o
		execProvider.env = runtimeEnvironment{}
		return execProvider, nil
	} else if o.CredentialSource.Certificate != nil {
		// Mirror of Options.client validation: exactly one of default config
		// and explicit location must be in effect.
		cert := o.CredentialSource.Certificate
		if !cert.UseDefaultCertificateConfig && cert.CertificateConfigLocation == "" {
			return nil, errors.New("credentials: \"certificate\" object must either specify a certificate_config_location or use_default_certificate_config should be true")
		}
		if cert.UseDefaultCertificateConfig && cert.CertificateConfigLocation != "" {
			return nil, errors.New("credentials: \"certificate\" object cannot specify both a certificate_config_location and use_default_certificate_config=true")
		}
		return &x509Provider{}, nil
	}
	return nil, errors.New("credentials: unable to parse credential source")
}
+
// getGoogHeaderValue formats the x-goog-api-client metrics header, recording
// the Go version, subject-token provider type, and whether impersonation and
// a custom token lifetime are configured.
func getGoogHeaderValue(conf *Options, p subjectTokenProvider) string {
	return fmt.Sprintf("gl-go/%s auth/%s google-byoid-sdk source/%s sa-impersonation/%t config-lifetime/%t",
		goVersion(),
		"unknown",
		p.providerType(),
		conf.ServiceAccountImpersonationURL != "",
		conf.ServiceAccountImpersonationLifetimeSeconds != 0)
}
@@ -0,0 +1,78 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package externalaccount
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "os"
+
+ "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/credsfile"
+)
+
+const (
+	// fileProviderType identifies file-sourced credentials in metrics headers.
+	fileProviderType = "file"
+)
+
+// fileSubjectProvider retrieves an external subject token from a local file.
+type fileSubjectProvider struct {
+	// File is the path of the file containing the subject token.
+	File string
+	// Format describes how the file contents are encoded (text or JSON).
+	// A nil Format means the raw, trimmed file contents are the token.
+	Format *credsfile.Format
+}
+
+// subjectToken reads the configured file and extracts the subject token
+// according to Format. For JSON files, the token is the string value of the
+// field named by Format.SubjectTokenFieldName.
+func (sp *fileSubjectProvider) subjectToken(context.Context) (string, error) {
+	tokenFile, err := os.Open(sp.File)
+	if err != nil {
+		return "", fmt.Errorf("credentials: failed to open credential file %q: %w", sp.File, err)
+	}
+	defer tokenFile.Close()
+	tokenBytes, err := internal.ReadAll(tokenFile)
+	if err != nil {
+		return "", fmt.Errorf("credentials: failed to read credential file: %w", err)
+	}
+	tokenBytes = bytes.TrimSpace(tokenBytes)
+
+	// No format configured: the trimmed file contents are the token.
+	if sp.Format == nil {
+		return string(tokenBytes), nil
+	}
+	switch sp.Format.Type {
+	case fileTypeJSON:
+		jsonData := make(map[string]interface{})
+		err = json.Unmarshal(tokenBytes, &jsonData)
+		if err != nil {
+			return "", fmt.Errorf("credentials: failed to unmarshal subject token file: %w", err)
+		}
+		val, ok := jsonData[sp.Format.SubjectTokenFieldName]
+		if !ok {
+			return "", errors.New("credentials: provided subject_token_field_name not found in credentials")
+		}
+		token, ok := val.(string)
+		if !ok {
+			return "", errors.New("credentials: improperly formatted subject token")
+		}
+		return token, nil
+	case fileTypeText:
+		return string(tokenBytes), nil
+	default:
+		return "", errors.New("credentials: invalid credential_source file format type: " + sp.Format.Type)
+	}
+}
+
+// providerType reports the provider type used in metrics headers.
+func (sp *fileSubjectProvider) providerType() string {
+	return fileProviderType
+}
@@ -0,0 +1,74 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package externalaccount
+
+import (
+ "runtime"
+ "strings"
+ "unicode"
+)
+
+var (
+	// version is a package internal global variable for testing purposes.
+	version = runtime.Version
+)
+
+// versionUnknown is only used when the runtime version cannot be determined.
+const versionUnknown = "UNKNOWN"
+
+// goVersion returns a Go runtime version derived from the runtime environment
+// that is modified to be suitable for reporting in a header, meaning it has no
+// whitespace. If it is unable to determine the Go runtime version, it returns
+// versionUnknown.
+func goVersion() string {
+	const develPrefix = "devel +"
+
+	s := version()
+	// Development builds look like "devel +<hash> <date>"; report just the hash.
+	if strings.HasPrefix(s, develPrefix) {
+		s = s[len(develPrefix):]
+		if p := strings.IndexFunc(s, unicode.IsSpace); p >= 0 {
+			s = s[:p]
+		}
+		return s
+	} else if p := strings.IndexFunc(s, unicode.IsSpace); p >= 0 {
+		// Otherwise drop everything after the first whitespace.
+		s = s[:p]
+	}
+
+	notSemverRune := func(r rune) bool {
+		return !strings.ContainsRune("0123456789.", r)
+	}
+
+	if strings.HasPrefix(s, "go1") {
+		s = s[2:]
+		var prerelease string
+		// Split the numeric core from any pre-release suffix (e.g. "rc1").
+		if p := strings.IndexFunc(s, notSemverRune); p >= 0 {
+			s, prerelease = s[:p], s[p:]
+		}
+		// Pad to full semver form, e.g. "1.21" -> "1.21.0".
+		if strings.HasSuffix(s, ".") {
+			s += "0"
+		} else if strings.Count(s, ".") < 2 {
+			s += ".0"
+		}
+		if prerelease != "" {
+			// Some release candidates already have a dash in them.
+			if !strings.HasPrefix(prerelease, "-") {
+				prerelease = "-" + prerelease
+			}
+			s += prerelease
+		}
+		return s
+	}
+	return versionUnknown
+}
@@ -0,0 +1,30 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package externalaccount
+
+import "context"
+
+// programmaticProvider adapts a caller-supplied SubjectTokenProvider to the
+// internal subjectTokenProvider interface.
+type programmaticProvider struct {
+	opts *RequestOptions
+	stp  SubjectTokenProvider
+}
+
+// providerType reports the provider type used in metrics headers.
+func (pp *programmaticProvider) providerType() string {
+	return programmaticProviderType
+}
+
+// subjectToken delegates token retrieval to the user-supplied provider.
+func (pp *programmaticProvider) subjectToken(ctx context.Context) (string, error) {
+	return pp.stp.SubjectToken(ctx, pp.opts)
+}
@@ -0,0 +1,93 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package externalaccount
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log/slog"
+ "net/http"
+
+ "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/credsfile"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
+const (
+	// Supported credential_source format types.
+	fileTypeText = "text"
+	fileTypeJSON = "json"
+	// Provider type identifiers reported in metrics headers.
+	urlProviderType          = "url"
+	programmaticProviderType = "programmatic"
+	x509ProviderType         = "x509"
+)
+
+// urlSubjectProvider retrieves an external subject token over HTTP.
+type urlSubjectProvider struct {
+	// URL is the endpoint the subject token is fetched from.
+	URL string
+	// Headers are added to every token request.
+	Headers map[string]string
+	// Format describes the response encoding; nil means the raw body is the token.
+	Format *credsfile.Format
+	// Client issues the HTTP requests.
+	Client *http.Client
+	// Logger records request/response debug logs.
+	Logger *slog.Logger
+}
+
+// subjectToken fetches the subject token from the configured URL and extracts
+// it according to Format. For JSON responses, the token is the string value of
+// the field named by Format.SubjectTokenFieldName.
+func (sp *urlSubjectProvider) subjectToken(ctx context.Context) (string, error) {
+	req, err := http.NewRequestWithContext(ctx, http.MethodGet, sp.URL, nil)
+	if err != nil {
+		return "", fmt.Errorf("credentials: HTTP request for URL-sourced credential failed: %w", err)
+	}
+
+	for key, val := range sp.Headers {
+		req.Header.Add(key, val)
+	}
+	sp.Logger.DebugContext(ctx, "url subject token request", "request", internallog.HTTPRequest(req, nil))
+	resp, body, err := internal.DoRequest(sp.Client, req)
+	if err != nil {
+		return "", fmt.Errorf("credentials: invalid response when retrieving subject token: %w", err)
+	}
+	sp.Logger.DebugContext(ctx, "url subject token response", "response", internallog.HTTPResponse(resp, body))
+	if c := resp.StatusCode; c < http.StatusOK || c >= http.StatusMultipleChoices {
+		return "", fmt.Errorf("credentials: status code %d: %s", c, body)
+	}
+
+	// No format configured: the raw response body is the token.
+	if sp.Format == nil {
+		return string(body), nil
+	}
+	switch sp.Format.Type {
+	// Use the shared constant instead of the literal "json" for consistency
+	// with the file-sourced provider in this package.
+	case fileTypeJSON:
+		jsonData := make(map[string]interface{})
+		err = json.Unmarshal(body, &jsonData)
+		if err != nil {
+			return "", fmt.Errorf("credentials: failed to unmarshal subject token file: %w", err)
+		}
+		val, ok := jsonData[sp.Format.SubjectTokenFieldName]
+		if !ok {
+			return "", errors.New("credentials: provided subject_token_field_name not found in credentials")
+		}
+		token, ok := val.(string)
+		if !ok {
+			return "", errors.New("credentials: improperly formatted subject token")
+		}
+		return token, nil
+	case fileTypeText:
+		return string(body), nil
+	default:
+		return "", errors.New("credentials: invalid credential_source file format type: " + sp.Format.Type)
+	}
+}
+
+// providerType reports the provider type used in metrics headers.
+func (sp *urlSubjectProvider) providerType() string {
+	return urlProviderType
+}
@@ -0,0 +1,63 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package externalaccount
+
+import (
+ "context"
+ "crypto/tls"
+ "net/http"
+ "time"
+
+ "cloud.google.com/go/auth/internal/transport/cert"
+)
+
+// x509Provider implements the subjectTokenProvider type for
+// x509 workload identity credentials. Because x509 credentials
+// rely on an mTLS connection to represent the 3rd party identity
+// rather than a subject token, this provider will always return
+// an empty string when a subject token is requested by the external account
+// token provider.
+type x509Provider struct {
+}
+
+// providerType reports the provider type used in metrics headers.
+func (xp *x509Provider) providerType() string {
+	return x509ProviderType
+}
+
+// subjectToken always returns an empty token: the identity is conveyed by the
+// mTLS connection itself, not by a token value.
+func (xp *x509Provider) subjectToken(ctx context.Context) (string, error) {
+	return "", nil
+}
+
+// createX509Client creates a new client that is configured with mTLS, using the
+// certificate configuration specified in the credential source.
+func createX509Client(certificateConfigLocation string) (*http.Client, error) {
+	certProvider, err := cert.NewWorkloadX509CertProvider(certificateConfigLocation)
+	if err != nil {
+		return nil, err
+	}
+	// Clone the default transport so its standard settings are preserved.
+	trans := http.DefaultTransport.(*http.Transport).Clone()
+
+	trans.TLSClientConfig = &tls.Config{
+		GetClientCertificate: certProvider,
+	}
+
+	// Create a client with default settings plus the X509 workload cert and key.
+	client := &http.Client{
+		Transport: trans,
+		Timeout:   30 * time.Second,
+	}
+
+	return client, nil
+}
@@ -0,0 +1,115 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package externalaccountuser
+
+import (
+ "context"
+ "errors"
+ "log/slog"
+ "net/http"
+ "time"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/auth/credentials/internal/stsexchange"
+ "cloud.google.com/go/auth/internal"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
+// Options stores the configuration for fetching tokens with external authorized
+// user credentials.
+type Options struct {
+	// Audience is the Secure Token Service (STS) audience which contains the
+	// resource name for the workforce pool and the provider identifier in that
+	// pool.
+	Audience string
+	// RefreshToken is the OAuth 2.0 refresh token.
+	RefreshToken string
+	// TokenURL is the STS token exchange endpoint for refresh.
+	TokenURL string
+	// TokenInfoURL is the STS endpoint URL for token introspection. Optional.
+	TokenInfoURL string
+	// ClientID is only required in conjunction with ClientSecret, as described
+	// below.
+	ClientID string
+	// ClientSecret is currently only required if token_info endpoint also needs
+	// to be called with the generated a cloud access token. When provided, STS
+	// will be called with additional basic authentication using client_id as
+	// username and client_secret as password.
+	ClientSecret string
+	// Scopes contains the desired scopes for the returned access token.
+	Scopes []string
+
+	// Client for token request.
+	Client *http.Client
+	// Logger for logging.
+	Logger *slog.Logger
+}
+
+// validate reports whether the minimum fields required for the refresh flow
+// are set: client ID and secret, a refresh token, and the token URL.
+func (c *Options) validate() bool {
+	return c.ClientID != "" && c.ClientSecret != "" && c.RefreshToken != "" && c.TokenURL != ""
+}
+
+// NewTokenProvider returns a [cloud.google.com/go/auth.TokenProvider]
+// configured with the provided options.
+func NewTokenProvider(opts *Options) (auth.TokenProvider, error) {
+	if !opts.validate() {
+		return nil, errors.New("credentials: invalid external_account_authorized_user configuration")
+	}
+
+	tp := &tokenProvider{
+		o: opts,
+	}
+	// Wrap in a cache so the STS endpoint is only hit when the token expires.
+	return auth.NewCachedTokenProvider(tp, nil), nil
+}
+
+// tokenProvider refreshes workforce identity tokens via the STS endpoint.
+type tokenProvider struct {
+	o *Options
+}
+
+// Token exchanges the stored refresh token at the STS endpoint for a new
+// access token, rotating the stored refresh token if the server returns one.
+func (tp *tokenProvider) Token(ctx context.Context) (*auth.Token, error) {
+	opts := tp.o
+
+	// Client ID/secret are transmitted via the HTTP basic auth header.
+	clientAuth := stsexchange.ClientAuthentication{
+		AuthStyle:    auth.StyleInHeader,
+		ClientID:     opts.ClientID,
+		ClientSecret: opts.ClientSecret,
+	}
+	headers := make(http.Header)
+	headers.Set("Content-Type", "application/x-www-form-urlencoded")
+	stsResponse, err := stsexchange.RefreshAccessToken(ctx, &stsexchange.Options{
+		Client:         opts.Client,
+		Endpoint:       opts.TokenURL,
+		RefreshToken:   opts.RefreshToken,
+		Authentication: clientAuth,
+		Headers:        headers,
+		Logger:         internallog.New(tp.o.Logger),
+	})
+	if err != nil {
+		return nil, err
+	}
+	if stsResponse.ExpiresIn < 0 {
+		return nil, errors.New("credentials: invalid expiry from security token service")
+	}
+
+	// guarded by the wrapping with CachedTokenProvider
+	if stsResponse.RefreshToken != "" {
+		opts.RefreshToken = stsResponse.RefreshToken
+	}
+	return &auth.Token{
+		Value:  stsResponse.AccessToken,
+		Expiry: time.Now().UTC().Add(time.Duration(stsResponse.ExpiresIn) * time.Second),
+		Type:   internal.TokenTypeBearer,
+	}, nil
+}
@@ -0,0 +1,191 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package gdch
+
+import (
+ "context"
+ "crypto"
+ "crypto/tls"
+ "crypto/x509"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "net/url"
+ "os"
+ "strings"
+ "time"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/credsfile"
+ "cloud.google.com/go/auth/internal/jwt"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
+const (
+	// GrantType is the grant type for the token request.
+	GrantType        = "urn:ietf:params:oauth:token-type:token-exchange"
+	requestTokenType = "urn:ietf:params:oauth:token-type:access_token"
+	subjectTokenType = "urn:k8s:params:oauth:token-type:serviceaccount"
+)
+
+var (
+	// gdchSupportFormatVersions lists the gdch_service_account format
+	// versions this package supports; currently only version "1".
+	gdchSupportFormatVersions map[string]bool = map[string]bool{
+		"1": true,
+	}
+)
+
+// Options for [NewTokenProvider].
+type Options struct {
+	// STSAudience is the audience sent to the GDCH STS endpoint.
+	STSAudience string
+	// Client issues the token requests.
+	Client *http.Client
+	// Logger records request/response debug logs.
+	Logger *slog.Logger
+}
+
+// NewTokenProvider returns a [cloud.google.com/go/auth.TokenProvider] from a
+// GDCH cred file.
+func NewTokenProvider(f *credsfile.GDCHServiceAccountFile, o *Options) (auth.TokenProvider, error) {
+	if !gdchSupportFormatVersions[f.FormatVersion] {
+		return nil, fmt.Errorf("credentials: unsupported gdch_service_account format %q", f.FormatVersion)
+	}
+	if o.STSAudience == "" {
+		return nil, errors.New("credentials: STSAudience must be set for the GDCH auth flows")
+	}
+	// The private key signs the self-signed JWT used as the subject token.
+	signer, err := internal.ParseKey([]byte(f.PrivateKey))
+	if err != nil {
+		return nil, err
+	}
+	certPool, err := loadCertPool(f.CertPath)
+	if err != nil {
+		return nil, err
+	}
+
+	tp := gdchProvider{
+		serviceIdentity: fmt.Sprintf("system:serviceaccount:%s:%s", f.Project, f.Name),
+		tokenURL:        f.TokenURL,
+		aud:             o.STSAudience,
+		signer:          signer,
+		pkID:            f.PrivateKeyID,
+		certPool:        certPool,
+		client:          o.Client,
+		logger:          internallog.New(o.Logger),
+	}
+	return tp, nil
+}
+
+// loadCertPool reads the PEM file at path and returns a cert pool containing
+// its certificates. It now fails fast when the file contains no parsable PEM
+// data instead of silently returning an empty pool (the previous behavior
+// ignored AppendCertsFromPEM's result, deferring the failure to TLS time).
+func loadCertPool(path string) (*x509.CertPool, error) {
+	pool := x509.NewCertPool()
+	pem, err := os.ReadFile(path)
+	if err != nil {
+		return nil, fmt.Errorf("credentials: failed to read certificate: %w", err)
+	}
+	if !pool.AppendCertsFromPEM(pem) {
+		return nil, fmt.Errorf("credentials: no certificates found in file %q", path)
+	}
+	return pool, nil
+}
+
+// gdchProvider exchanges a locally-signed service account JWT for an access
+// token at a GDCH STS endpoint.
+type gdchProvider struct {
+	serviceIdentity string
+	tokenURL        string
+	aud             string
+	signer          crypto.Signer
+	pkID            string
+	certPool        *x509.CertPool
+
+	client *http.Client
+	logger *slog.Logger
+}
+
+// Token mints a one-hour self-signed JWT for the service identity, exchanges
+// it at the GDCH STS endpoint, and returns the resulting access token.
+func (g gdchProvider) Token(ctx context.Context) (*auth.Token, error) {
+	addCertToTransport(g.client, g.certPool)
+	iat := time.Now()
+	exp := iat.Add(time.Hour)
+	claims := jwt.Claims{
+		Iss: g.serviceIdentity,
+		Sub: g.serviceIdentity,
+		Aud: g.tokenURL,
+		Iat: iat.Unix(),
+		Exp: exp.Unix(),
+	}
+	h := jwt.Header{
+		Algorithm: jwt.HeaderAlgRSA256,
+		Type:      jwt.HeaderType,
+		KeyID:     g.pkID, // pkID is already a string; the string() conversion was redundant.
+	}
+	payload, err := jwt.EncodeJWS(&h, &claims, g.signer)
+	if err != nil {
+		return nil, err
+	}
+	v := url.Values{}
+	v.Set("grant_type", GrantType)
+	v.Set("audience", g.aud)
+	v.Set("requested_token_type", requestTokenType)
+	v.Set("subject_token", payload)
+	v.Set("subject_token_type", subjectTokenType)
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodPost, g.tokenURL, strings.NewReader(v.Encode()))
+	if err != nil {
+		return nil, err
+	}
+	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
+	g.logger.DebugContext(ctx, "gdch token request", "request", internallog.HTTPRequest(req, []byte(v.Encode())))
+	resp, body, err := internal.DoRequest(g.client, req)
+	if err != nil {
+		return nil, fmt.Errorf("credentials: cannot fetch token: %w", err)
+	}
+	g.logger.DebugContext(ctx, "gdch token response", "response", internallog.HTTPResponse(resp, body))
+	if c := resp.StatusCode; c < http.StatusOK || c > http.StatusMultipleChoices {
+		return nil, &auth.Error{
+			Response: resp,
+			Body:     body,
+		}
+	}
+
+	var tokenRes struct {
+		AccessToken string `json:"access_token"`
+		TokenType   string `json:"token_type"`
+		ExpiresIn   int64  `json:"expires_in"` // relative seconds from now
+	}
+	if err := json.Unmarshal(body, &tokenRes); err != nil {
+		return nil, fmt.Errorf("credentials: cannot fetch token: %w", err)
+	}
+	token := &auth.Token{
+		Value: tokenRes.AccessToken,
+		Type:  tokenRes.TokenType,
+	}
+	// Keep the raw response available to callers via token metadata.
+	raw := make(map[string]interface{})
+	json.Unmarshal(body, &raw) // no error checks for optional fields
+	token.Metadata = raw
+
+	if secs := tokenRes.ExpiresIn; secs > 0 {
+		token.Expiry = time.Now().Add(time.Duration(secs) * time.Second)
+	}
+	return token, nil
+}
+
+// addCertToTransport makes a best effort attempt at adding in the cert info to
+// the client. It tries to keep all configured transport settings if the
+// underlying transport is an http.Transport. Or else it overwrites the
+// transport with defaults adding in the certs.
+func addCertToTransport(hc *http.Client, certPool *x509.CertPool) {
+	trans, ok := hc.Transport.(*http.Transport)
+	if !ok {
+		trans = http.DefaultTransport.(*http.Transport).Clone()
+		// Install the clone on the client. Previously the clone was
+		// configured but discarded, so the cert pool was never applied
+		// when the client had a nil or non-*http.Transport transport.
+		hc.Transport = trans
+	}
+	trans.TLSClientConfig = &tls.Config{
+		RootCAs: certPool,
+	}
+}
@@ -0,0 +1,156 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package impersonate
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "time"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/auth/internal"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
+const (
+	// defaultTokenLifetime is used when Options.TokenLifetimeSeconds is unset.
+	defaultTokenLifetime = "3600s"
+	authHeaderKey        = "Authorization"
+)
+
+// generateAccesstokenReq is used for service account impersonation
+type generateAccessTokenReq struct {
+	Delegates []string `json:"delegates,omitempty"`
+	Lifetime  string   `json:"lifetime,omitempty"`
+	Scope     []string `json:"scope,omitempty"`
+}
+
+// impersonateTokenResponse is the body returned by the impersonation endpoint.
+type impersonateTokenResponse struct {
+	AccessToken string `json:"accessToken"`
+	ExpireTime  string `json:"expireTime"`
+}
+
+// NewTokenProvider uses a source credential, stored in Ts, to request an access token to the provided URL.
+// Scopes can be defined when the access token is requested.
+func NewTokenProvider(opts *Options) (auth.TokenProvider, error) {
+	if err := opts.validate(); err != nil {
+		return nil, err
+	}
+	// Options itself satisfies auth.TokenProvider via its Token method.
+	return opts, nil
+}
+
+// Options for [NewTokenProvider].
+type Options struct {
+	// Tp is the source credential used to generate a token on the
+	// impersonated service account. Required.
+	Tp auth.TokenProvider
+
+	// URL is the endpoint to call to generate a token
+	// on behalf of the service account. Required.
+	URL string
+	// Scopes that the impersonated credential should have. Required.
+	Scopes []string
+	// Delegates are the service account email addresses in a delegation chain.
+	// Each service account must be granted roles/iam.serviceAccountTokenCreator
+	// on the next service account in the chain. Optional.
+	Delegates []string
+	// TokenLifetimeSeconds is the number of seconds the impersonation token will
+	// be valid for. Defaults to 1 hour if unset. Optional.
+	TokenLifetimeSeconds int
+	// Client configures the underlying client used to make network requests
+	// when fetching tokens. Required.
+	Client *http.Client
+	// Logger is used for debug logging. If provided, logging will be enabled
+	// at the loggers configured level. By default logging is disabled unless
+	// enabled by setting GOOGLE_SDK_GO_LOGGING_LEVEL in which case a default
+	// logger will be used. Optional.
+	Logger *slog.Logger
+}
+
+// validate checks that the two required fields — the source credential and
+// the impersonation URL — are present.
+func (o *Options) validate() error {
+	if o.Tp == nil {
+		return errors.New("credentials: missing required 'source_credentials' field in impersonated credentials")
+	}
+	if o.URL == "" {
+		return errors.New("credentials: missing required 'service_account_impersonation_url' field in impersonated credentials")
+	}
+	return nil
+}
+
+// Token performs the exchange to get a temporary service account token to allow access to GCP.
+func (o *Options) Token(ctx context.Context) (*auth.Token, error) {
+	logger := internallog.New(o.Logger)
+	// Default to a one-hour lifetime unless the caller configured one.
+	lifetime := defaultTokenLifetime
+	if o.TokenLifetimeSeconds != 0 {
+		lifetime = fmt.Sprintf("%ds", o.TokenLifetimeSeconds)
+	}
+	reqBody := generateAccessTokenReq{
+		Lifetime:  lifetime,
+		Scope:     o.Scopes,
+		Delegates: o.Delegates,
+	}
+	b, err := json.Marshal(reqBody)
+	if err != nil {
+		return nil, fmt.Errorf("credentials: unable to marshal request: %w", err)
+	}
+	req, err := http.NewRequestWithContext(ctx, "POST", o.URL, bytes.NewReader(b))
+	if err != nil {
+		return nil, fmt.Errorf("credentials: unable to create impersonation request: %w", err)
+	}
+	req.Header.Set("Content-Type", "application/json")
+	// The impersonation request itself is authorized with the source credential.
+	if err := setAuthHeader(ctx, o.Tp, req); err != nil {
+		return nil, err
+	}
+	logger.DebugContext(ctx, "impersonated token request", "request", internallog.HTTPRequest(req, b))
+	resp, body, err := internal.DoRequest(o.Client, req)
+	if err != nil {
+		return nil, fmt.Errorf("credentials: unable to generate access token: %w", err)
+	}
+	logger.DebugContext(ctx, "impersonated token response", "response", internallog.HTTPResponse(resp, body))
+	if c := resp.StatusCode; c < http.StatusOK || c >= http.StatusMultipleChoices {
+		return nil, fmt.Errorf("credentials: status code %d: %s", c, body)
+	}
+
+	var accessTokenResp impersonateTokenResponse
+	if err := json.Unmarshal(body, &accessTokenResp); err != nil {
+		return nil, fmt.Errorf("credentials: unable to parse response: %w", err)
+	}
+	// The endpoint reports an absolute RFC 3339 expiry rather than a duration.
+	expiry, err := time.Parse(time.RFC3339, accessTokenResp.ExpireTime)
+	if err != nil {
+		return nil, fmt.Errorf("credentials: unable to parse expiry: %w", err)
+	}
+	return &auth.Token{
+		Value:  accessTokenResp.AccessToken,
+		Expiry: expiry,
+		Type:   internal.TokenTypeBearer,
+	}, nil
+}
+
+// setAuthHeader fetches a token from tp and attaches it to r's Authorization
+// header, defaulting the token type to Bearer when the token has none.
+func setAuthHeader(ctx context.Context, tp auth.TokenProvider, r *http.Request) error {
+	t, err := tp.Token(ctx)
+	if err != nil {
+		return err
+	}
+	typ := t.Type
+	if typ == "" {
+		typ = internal.TokenTypeBearer
+	}
+	r.Header.Set(authHeaderKey, typ+" "+t.Value)
+	return nil
+}
@@ -0,0 +1,167 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package stsexchange
+
+import (
+ "context"
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/auth/internal"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
+const (
+	// GrantType for a sts exchange.
+	GrantType = "urn:ietf:params:oauth:grant-type:token-exchange"
+	// TokenType for a sts exchange.
+	TokenType = "urn:ietf:params:oauth:token-type:access_token"
+
+	jwtTokenType = "urn:ietf:params:oauth:token-type:jwt"
+)
+
+// Options stores the configuration for making an sts exchange request.
+type Options struct {
+	// Client issues the HTTP requests.
+	Client *http.Client
+	// Logger records request/response debug logs.
+	Logger *slog.Logger
+	// Endpoint is the STS URL the exchange request is sent to.
+	Endpoint string
+	// Request carries the token exchange parameters.
+	Request *TokenRequest
+	// Authentication configures how the client ID/secret are transmitted.
+	Authentication ClientAuthentication
+	// Headers are added to every request.
+	Headers http.Header
+	// ExtraOpts are optional fields marshalled into the `options` field of the
+	// request body.
+	ExtraOpts map[string]interface{}
+	// RefreshToken is used only by the refresh flow.
+	RefreshToken string
+}
+
+// RefreshAccessToken performs the token exchange using a refresh token flow.
+func RefreshAccessToken(ctx context.Context, opts *Options) (*TokenResponse, error) {
+	// The refresh grant needs only the grant type and the refresh token itself.
+	data := url.Values{}
+	data.Set("grant_type", "refresh_token")
+	data.Set("refresh_token", opts.RefreshToken)
+	return doRequest(ctx, opts, data)
+}
+
+// ExchangeToken performs an oauth2 token exchange with the provided endpoint.
+func ExchangeToken(ctx context.Context, opts *Options) (*TokenResponse, error) {
+	data := url.Values{}
+	data.Set("audience", opts.Request.Audience)
+	data.Set("grant_type", GrantType)
+	data.Set("requested_token_type", TokenType)
+	data.Set("subject_token_type", opts.Request.SubjectTokenType)
+	data.Set("subject_token", opts.Request.SubjectToken)
+	data.Set("scope", strings.Join(opts.Request.Scope, " "))
+	if opts.ExtraOpts != nil {
+		// Use a distinct local name; the original shadowed the opts *Options
+		// parameter with the marshalled []byte, which was confusing.
+		extraOpts, err := json.Marshal(opts.ExtraOpts)
+		if err != nil {
+			return nil, fmt.Errorf("credentials: failed to marshal additional options: %w", err)
+		}
+		data.Set("options", string(extraOpts))
+	}
+	return doRequest(ctx, opts, data)
+}
+
+// doRequest posts the form-encoded data to the STS endpoint, applying client
+// authentication and any configured headers, and decodes the JSON response
+// into a TokenResponse. Non-2xx responses are returned as errors.
+func doRequest(ctx context.Context, opts *Options, data url.Values) (*TokenResponse, error) {
+	opts.Authentication.InjectAuthentication(data, opts.Headers)
+	encodedData := data.Encode()
+	logger := internallog.New(opts.Logger)
+
+	req, err := http.NewRequestWithContext(ctx, http.MethodPost, opts.Endpoint, strings.NewReader(encodedData))
+	if err != nil {
+		return nil, fmt.Errorf("credentials: failed to properly build http request: %w", err)
+	}
+	for key, list := range opts.Headers {
+		for _, val := range list {
+			req.Header.Add(key, val)
+		}
+	}
+	req.Header.Set("Content-Length", strconv.Itoa(len(encodedData)))
+
+	logger.DebugContext(ctx, "sts token request", "request", internallog.HTTPRequest(req, []byte(encodedData)))
+	resp, body, err := internal.DoRequest(opts.Client, req)
+	if err != nil {
+		return nil, fmt.Errorf("credentials: invalid response from Secure Token Server: %w", err)
+	}
+	logger.DebugContext(ctx, "sts token response", "response", internallog.HTTPResponse(resp, body))
+	if c := resp.StatusCode; c < http.StatusOK || c > http.StatusMultipleChoices {
+		return nil, fmt.Errorf("credentials: status code %d: %s", c, body)
+	}
+	var stsResp TokenResponse
+	if err := json.Unmarshal(body, &stsResp); err != nil {
+		return nil, fmt.Errorf("credentials: failed to unmarshal response body from Secure Token Server: %w", err)
+	}
+
+	return &stsResp, nil
+}
+
+// TokenRequest contains fields necessary to make an oauth2 token
+// exchange.
+type TokenRequest struct {
+	// ActingParty holds optional actor token details for delegation scenarios.
+	ActingParty struct {
+		ActorToken     string
+		ActorTokenType string
+	}
+	GrantType          string
+	Resource           string
+	Audience           string
+	Scope              []string
+	RequestedTokenType string
+	SubjectToken       string
+	SubjectTokenType   string
+}
+
+// TokenResponse is used to decode the remote server response during
+// an oauth2 token exchange.
+type TokenResponse struct {
+	AccessToken     string `json:"access_token"`
+	IssuedTokenType string `json:"issued_token_type"`
+	TokenType       string `json:"token_type"`
+	ExpiresIn       int    `json:"expires_in"` // token lifetime, seconds from now
+	Scope           string `json:"scope"`
+	RefreshToken    string `json:"refresh_token"`
+}
+
+// ClientAuthentication represents an OAuth client ID and secret and the
+// mechanism for passing these credentials as stated in rfc6749#2.3.1.
+type ClientAuthentication struct {
+	// AuthStyle selects basic-auth header vs request-body transmission.
+	AuthStyle    auth.Style
+	ClientID     string
+	ClientSecret string
+}
+
+// InjectAuthentication is used to add authentication to a Secure Token Service
+// exchange request. It modifies either the passed url.Values or http.Header
+// depending on the desired authentication format.
+func (c *ClientAuthentication) InjectAuthentication(values url.Values, headers http.Header) {
+	// No-op unless both credentials and both destinations are available.
+	if c.ClientID == "" || c.ClientSecret == "" || values == nil || headers == nil {
+		return
+	}
+	switch c.AuthStyle {
+	case auth.StyleInHeader:
+		// rfc6749 2.3.1: HTTP basic auth, client ID as user, secret as password.
+		plainHeader := c.ClientID + ":" + c.ClientSecret
+		headers.Set("Authorization", "Basic "+base64.StdEncoding.EncodeToString([]byte(plainHeader)))
+	default:
+		values.Set("client_id", c.ClientID)
+		values.Set("client_secret", c.ClientSecret)
+	}
+}
@@ -0,0 +1,89 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package credentials
+
+import (
+ "context"
+ "crypto"
+ "errors"
+ "fmt"
+ "log/slog"
+ "strings"
+ "time"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/credsfile"
+ "cloud.google.com/go/auth/internal/jwt"
+)
+
var (
	// now is an indirection over time.Now so tests can substitute a fixed
	// clock when checking the issued-at/expiry claims of minted tokens.
	now func() time.Time = time.Now
)
+
+// configureSelfSignedJWT uses the private key in the service account to create
+// a JWT without making a network call.
+func configureSelfSignedJWT(f *credsfile.ServiceAccountFile, opts *DetectOptions) (auth.TokenProvider, error) {
+ if len(opts.scopes()) == 0 && opts.Audience == "" {
+ return nil, errors.New("credentials: both scopes and audience are empty")
+ }
+ signer, err := internal.ParseKey([]byte(f.PrivateKey))
+ if err != nil {
+ return nil, fmt.Errorf("credentials: could not parse key: %w", err)
+ }
+ return &selfSignedTokenProvider{
+ email: f.ClientEmail,
+ audience: opts.Audience,
+ scopes: opts.scopes(),
+ signer: signer,
+ pkID: f.PrivateKeyID,
+ logger: opts.logger(),
+ }, nil
+}
+
// selfSignedTokenProvider mints JWTs locally, signed with the service
// account's private key, instead of exchanging credentials over the network.
type selfSignedTokenProvider struct {
	// email is used as both the "iss" and "sub" claims.
	email string
	// audience becomes the "aud" claim; may be empty when scopes are set.
	audience string
	// scopes are joined with spaces into the "scope" claim.
	scopes []string
	// signer signs the encoded JWS.
	signer crypto.Signer
	// pkID is the private key ID, placed in the JWT header's key ID field.
	pkID   string
	logger *slog.Logger
}
+
+func (tp *selfSignedTokenProvider) Token(context.Context) (*auth.Token, error) {
+ iat := now()
+ exp := iat.Add(time.Hour)
+ scope := strings.Join(tp.scopes, " ")
+ c := &jwt.Claims{
+ Iss: tp.email,
+ Sub: tp.email,
+ Aud: tp.audience,
+ Scope: scope,
+ Iat: iat.Unix(),
+ Exp: exp.Unix(),
+ }
+ h := &jwt.Header{
+ Algorithm: jwt.HeaderAlgRSA256,
+ Type: jwt.HeaderType,
+ KeyID: string(tp.pkID),
+ }
+ tok, err := jwt.EncodeJWS(h, c, tp.signer)
+ if err != nil {
+ return nil, fmt.Errorf("credentials: could not encode JWT: %w", err)
+ }
+ tp.logger.Debug("created self-signed JWT", "token", tok)
+ return &auth.Token{Value: tok, Type: internal.TokenTypeBearer, Expiry: exp}, nil
+}
@@ -0,0 +1,247 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package httptransport provides functionality for managing HTTP client
+// connections to Google Cloud services.
+package httptransport
+
+import (
+ "crypto/tls"
+ "errors"
+ "fmt"
+ "log/slog"
+ "net/http"
+
+ "cloud.google.com/go/auth"
+ detect "cloud.google.com/go/auth/credentials"
+ "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/transport"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
// ClientCertProvider is a function that returns a TLS client certificate to be
// used when opening TLS connections. It follows the same semantics as
// [crypto/tls.Config.GetClientCertificate]. Because it is declared as a type
// alias, any function with this signature satisfies it directly.
type ClientCertProvider = func(*tls.CertificateRequestInfo) (*tls.Certificate, error)
+
// Options used to configure a [net/http.Client] from [NewClient].
type Options struct {
	// DisableTelemetry disables default telemetry (OpenTelemetry). An example
	// reason to do so would be to bind custom telemetry that overrides the
	// defaults.
	DisableTelemetry bool
	// DisableAuthentication specifies that no authentication should be used. It
	// is suitable only for testing and for accessing public resources, like
	// public Google Cloud Storage buckets.
	DisableAuthentication bool
	// Headers are extra HTTP headers that will be appended to every outgoing
	// request.
	//
	// NOTE(review): the header transport sets these keys on the clone of each
	// request, replacing any caller-set values for the same key rather than
	// appending to them — confirm whether "appended" above is the intended
	// contract.
	Headers http.Header
	// BaseRoundTripper overrides the base transport used for serving requests.
	// If specified ClientCertProvider is ignored.
	BaseRoundTripper http.RoundTripper
	// Endpoint overrides the default endpoint to be used for a service.
	Endpoint string
	// APIKey specifies an API key to be used as the basis for authentication.
	// If set DetectOpts are ignored.
	APIKey string
	// Credentials used to add Authorization header to all requests. If set
	// DetectOpts are ignored.
	Credentials *auth.Credentials
	// ClientCertProvider is a function that returns a TLS client certificate to
	// be used when opening TLS connections. It follows the same semantics as
	// crypto/tls.Config.GetClientCertificate.
	ClientCertProvider ClientCertProvider
	// DetectOpts configures settings for detect Application Default
	// Credentials.
	DetectOpts *detect.DetectOptions
	// UniverseDomain is the default service domain for a given Cloud universe.
	// The default value is "googleapis.com". This is the universe domain
	// configured for the client, which will be compared to the universe domain
	// that is separately configured for the credentials.
	UniverseDomain string
	// Logger is used for debug logging. If provided, logging will be enabled
	// at the loggers configured level. By default logging is disabled unless
	// enabled by setting GOOGLE_SDK_GO_LOGGING_LEVEL in which case a default
	// logger will be used. Optional.
	Logger *slog.Logger

	// InternalOptions are NOT meant to be set directly by consumers of this
	// package, they should only be set by generated client code.
	InternalOptions *InternalOptions
}
+
+func (o *Options) validate() error {
+ if o == nil {
+ return errors.New("httptransport: opts required to be non-nil")
+ }
+ if o.InternalOptions != nil && o.InternalOptions.SkipValidation {
+ return nil
+ }
+ hasCreds := o.APIKey != "" ||
+ o.Credentials != nil ||
+ (o.DetectOpts != nil && len(o.DetectOpts.CredentialsJSON) > 0) ||
+ (o.DetectOpts != nil && o.DetectOpts.CredentialsFile != "")
+ if o.DisableAuthentication && hasCreds {
+ return errors.New("httptransport: DisableAuthentication is incompatible with options that set or detect credentials")
+ }
+ return nil
+}
+
+// client returns the client a user set for the detect options or nil if one was
+// not set.
+func (o *Options) client() *http.Client {
+ if o.DetectOpts != nil && o.DetectOpts.Client != nil {
+ return o.DetectOpts.Client
+ }
+ return nil
+}
+
// logger returns o.Logger wrapped via internallog.New; per the Logger field
// docs, a nil o.Logger yields a default (disabled unless enabled through
// GOOGLE_SDK_GO_LOGGING_LEVEL).
func (o *Options) logger() *slog.Logger {
	return internallog.New(o.Logger)
}
+
+func (o *Options) resolveDetectOptions() *detect.DetectOptions {
+ io := o.InternalOptions
+ // soft-clone these so we are not updating a ref the user holds and may reuse
+ do := transport.CloneDetectOptions(o.DetectOpts)
+
+ // If scoped JWTs are enabled user provided an aud, allow self-signed JWT.
+ if (io != nil && io.EnableJWTWithScope) || do.Audience != "" {
+ do.UseSelfSignedJWT = true
+ }
+ // Only default scopes if user did not also set an audience.
+ if len(do.Scopes) == 0 && do.Audience == "" && io != nil && len(io.DefaultScopes) > 0 {
+ do.Scopes = make([]string, len(io.DefaultScopes))
+ copy(do.Scopes, io.DefaultScopes)
+ }
+ if len(do.Scopes) == 0 && do.Audience == "" && io != nil {
+ do.Audience = o.InternalOptions.DefaultAudience
+ }
+ if o.ClientCertProvider != nil {
+ tlsConfig := &tls.Config{
+ GetClientCertificate: o.ClientCertProvider,
+ }
+ do.Client = transport.DefaultHTTPClientWithTLS(tlsConfig)
+ do.TokenURL = detect.GoogleMTLSTokenURL
+ }
+ if do.Logger == nil {
+ do.Logger = o.logger()
+ }
+ return do
+}
+
// InternalOptions are only meant to be set by generated client code. These are
// not meant to be set directly by consumers of this package. Configuration in
// this type is considered EXPERIMENTAL and may be removed at any time in the
// future without warning.
type InternalOptions struct {
	// EnableJWTWithScope specifies if scope can be used with self-signed JWT.
	EnableJWTWithScope bool
	// DefaultAudience specifies a default audience to be used as the audience
	// field ("aud") for the JWT token authentication.
	DefaultAudience string
	// DefaultEndpointTemplate combined with UniverseDomain specifies the
	// default endpoint.
	DefaultEndpointTemplate string
	// DefaultMTLSEndpoint specifies the default mTLS endpoint.
	DefaultMTLSEndpoint string
	// DefaultScopes specifies the default OAuth2 scopes to be used for a
	// service.
	DefaultScopes []string
	// SkipValidation bypasses validation on Options. It should only be used
	// internally for clients that need more control over their transport.
	// When set, Options.validate returns nil immediately.
	SkipValidation bool
	// SkipUniverseDomainValidation skips the verification that the universe
	// domain configured for the client matches the universe domain configured
	// for the credentials. It should only be used internally for clients that
	// need more control over their transport. The default is false.
	SkipUniverseDomainValidation bool
}
+
+// AddAuthorizationMiddleware adds a middleware to the provided client's
+// transport that sets the Authorization header with the value produced by the
+// provided [cloud.google.com/go/auth.Credentials]. An error is returned only
+// if client or creds is nil.
+//
+// This function does not support setting a universe domain value on the client.
+func AddAuthorizationMiddleware(client *http.Client, creds *auth.Credentials) error {
+ if client == nil || creds == nil {
+ return fmt.Errorf("httptransport: client and tp must not be nil")
+ }
+ base := client.Transport
+ if base == nil {
+ if dt, ok := http.DefaultTransport.(*http.Transport); ok {
+ base = dt.Clone()
+ } else {
+ // Directly reuse the DefaultTransport if the application has
+ // replaced it with an implementation of RoundTripper other than
+ // http.Transport.
+ base = http.DefaultTransport
+ }
+ }
+ client.Transport = &authTransport{
+ creds: creds,
+ base: base,
+ }
+ return nil
+}
+
+// NewClient returns a [net/http.Client] that can be used to communicate with a
+// Google cloud service, configured with the provided [Options]. It
+// automatically appends Authorization headers to all outgoing requests.
+func NewClient(opts *Options) (*http.Client, error) {
+ if err := opts.validate(); err != nil {
+ return nil, err
+ }
+
+ tOpts := &transport.Options{
+ Endpoint: opts.Endpoint,
+ ClientCertProvider: opts.ClientCertProvider,
+ Client: opts.client(),
+ UniverseDomain: opts.UniverseDomain,
+ Logger: opts.logger(),
+ }
+ if io := opts.InternalOptions; io != nil {
+ tOpts.DefaultEndpointTemplate = io.DefaultEndpointTemplate
+ tOpts.DefaultMTLSEndpoint = io.DefaultMTLSEndpoint
+ }
+ clientCertProvider, dialTLSContext, err := transport.GetHTTPTransportConfig(tOpts)
+ if err != nil {
+ return nil, err
+ }
+ baseRoundTripper := opts.BaseRoundTripper
+ if baseRoundTripper == nil {
+ baseRoundTripper = defaultBaseTransport(clientCertProvider, dialTLSContext)
+ }
+ // Ensure the token exchange transport uses the same ClientCertProvider as the API transport.
+ opts.ClientCertProvider = clientCertProvider
+ trans, err := newTransport(baseRoundTripper, opts)
+ if err != nil {
+ return nil, err
+ }
+ return &http.Client{
+ Transport: trans,
+ }, nil
+}
+
+// SetAuthHeader uses the provided token to set the Authorization header on a
+// request. If the token.Type is empty, the type is assumed to be Bearer.
+func SetAuthHeader(token *auth.Token, req *http.Request) {
+ typ := token.Type
+ if typ == "" {
+ typ = internal.TokenTypeBearer
+ }
+ req.Header.Set("Authorization", typ+" "+token.Value)
+}
@@ -0,0 +1,234 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package httptransport
+
+import (
+ "context"
+ "crypto/tls"
+ "net"
+ "net/http"
+ "os"
+ "time"
+
+ "cloud.google.com/go/auth"
+ "cloud.google.com/go/auth/credentials"
+ "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/transport"
+ "cloud.google.com/go/auth/internal/transport/cert"
+ "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp"
+ "golang.org/x/net/http2"
+)
+
const (
	// quotaProjectHeaderKey is the HTTP header used to convey the quota
	// project to Google APIs; it is set by newTransport when a quota project
	// is resolved from the credentials or environment.
	quotaProjectHeaderKey = "X-goog-user-project"
)
+
+func newTransport(base http.RoundTripper, opts *Options) (http.RoundTripper, error) {
+ var headers = opts.Headers
+ ht := &headerTransport{
+ base: base,
+ headers: headers,
+ }
+ var trans http.RoundTripper = ht
+ trans = addOpenTelemetryTransport(trans, opts)
+ switch {
+ case opts.DisableAuthentication:
+ // Do nothing.
+ case opts.APIKey != "":
+ qp := internal.GetQuotaProject(nil, opts.Headers.Get(quotaProjectHeaderKey))
+ if qp != "" {
+ if headers == nil {
+ headers = make(map[string][]string, 1)
+ }
+ headers.Set(quotaProjectHeaderKey, qp)
+ }
+ trans = &apiKeyTransport{
+ Transport: trans,
+ Key: opts.APIKey,
+ }
+ default:
+ var creds *auth.Credentials
+ if opts.Credentials != nil {
+ creds = opts.Credentials
+ } else {
+ var err error
+ creds, err = credentials.DetectDefault(opts.resolveDetectOptions())
+ if err != nil {
+ return nil, err
+ }
+ }
+ qp, err := creds.QuotaProjectID(context.Background())
+ if err != nil {
+ return nil, err
+ }
+ if qp != "" {
+ if headers == nil {
+ headers = make(map[string][]string, 1)
+ }
+ // Don't overwrite user specified quota
+ if v := headers.Get(quotaProjectHeaderKey); v == "" {
+ headers.Set(quotaProjectHeaderKey, qp)
+ }
+ }
+ var skipUD bool
+ if iOpts := opts.InternalOptions; iOpts != nil {
+ skipUD = iOpts.SkipUniverseDomainValidation
+ }
+ creds.TokenProvider = auth.NewCachedTokenProvider(creds.TokenProvider, nil)
+ trans = &authTransport{
+ base: trans,
+ creds: creds,
+ clientUniverseDomain: opts.UniverseDomain,
+ skipUniverseDomainValidation: skipUD,
+ }
+ }
+ return trans, nil
+}
+
+// defaultBaseTransport returns the base HTTP transport.
+// On App Engine, this is urlfetch.Transport.
+// Otherwise, use a default transport, taking most defaults from
+// http.DefaultTransport.
+// If TLSCertificate is available, set TLSClientConfig as well.
+func defaultBaseTransport(clientCertSource cert.Provider, dialTLSContext func(context.Context, string, string) (net.Conn, error)) http.RoundTripper {
+ defaultTransport, ok := http.DefaultTransport.(*http.Transport)
+ if !ok {
+ defaultTransport = transport.BaseTransport()
+ }
+ trans := defaultTransport.Clone()
+ trans.MaxIdleConnsPerHost = 100
+
+ if clientCertSource != nil {
+ trans.TLSClientConfig = &tls.Config{
+ GetClientCertificate: clientCertSource,
+ }
+ }
+ if dialTLSContext != nil {
+ // If DialTLSContext is set, TLSClientConfig wil be ignored
+ trans.DialTLSContext = dialTLSContext
+ }
+
+ // Configures the ReadIdleTimeout HTTP/2 option for the
+ // transport. This allows broken idle connections to be pruned more quickly,
+ // preventing the client from attempting to re-use connections that will no
+ // longer work.
+ http2Trans, err := http2.ConfigureTransports(trans)
+ if err == nil {
+ http2Trans.ReadIdleTimeout = time.Second * 31
+ }
+
+ return trans
+}
+
+type apiKeyTransport struct {
+ // Key is the API Key to set on requests.
+ Key string
+ // Transport is the underlying HTTP transport.
+ // If nil, http.DefaultTransport is used.
+ Transport http.RoundTripper
+}
+
+func (t *apiKeyTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+ newReq := *req
+ args := newReq.URL.Query()
+ args.Set("key", t.Key)
+ newReq.URL.RawQuery = args.Encode()
+ return t.Transport.RoundTrip(&newReq)
+}
+
+type headerTransport struct {
+ headers http.Header
+ base http.RoundTripper
+}
+
+func (t *headerTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+ rt := t.base
+ newReq := *req
+ newReq.Header = make(http.Header)
+ for k, vv := range req.Header {
+ newReq.Header[k] = vv
+ }
+
+ for k, v := range t.headers {
+ newReq.Header[k] = v
+ }
+
+ return rt.RoundTrip(&newReq)
+}
+
+func addOpenTelemetryTransport(trans http.RoundTripper, opts *Options) http.RoundTripper {
+ if opts.DisableTelemetry {
+ return trans
+ }
+ return otelhttp.NewTransport(trans)
+}
+
// authTransport obtains a token from creds for each request, optionally
// validates the universe domain, sets the Authorization header on a clone of
// the request, and delegates to base.
type authTransport struct {
	creds *auth.Credentials
	base  http.RoundTripper
	// clientUniverseDomain is the client-configured universe domain; see
	// getClientUniverseDomain for the resolution order.
	clientUniverseDomain string
	// skipUniverseDomainValidation disables the client/credentials universe
	// domain comparison in RoundTrip.
	skipUniverseDomainValidation bool
}
+
+// getClientUniverseDomain returns the default service domain for a given Cloud
+// universe, with the following precedence:
+//
+// 1. A non-empty option.WithUniverseDomain or similar client option.
+// 2. A non-empty environment variable GOOGLE_CLOUD_UNIVERSE_DOMAIN.
+// 3. The default value "googleapis.com".
+//
+// This is the universe domain configured for the client, which will be compared
+// to the universe domain that is separately configured for the credentials.
+func (t *authTransport) getClientUniverseDomain() string {
+ if t.clientUniverseDomain != "" {
+ return t.clientUniverseDomain
+ }
+ if envUD := os.Getenv(internal.UniverseDomainEnvVar); envUD != "" {
+ return envUD
+ }
+ return internal.DefaultUniverseDomain
+}
+
// RoundTrip authorizes and authenticates the request with an
// access token from Transport's Source. Per the RoundTripper contract we must
// not modify the initial request, so we clone it, and we must close the body
// on any errors that happens during our token logic.
func (t *authTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	// Until ownership of the body passes to the base RoundTripper, this
	// method is responsible for closing it on early error returns.
	reqBodyClosed := false
	if req.Body != nil {
		defer func() {
			if !reqBodyClosed {
				req.Body.Close()
			}
		}()
	}
	token, err := t.creds.Token(req.Context())
	if err != nil {
		return nil, err
	}
	// Universe-domain validation is skipped when the client opted out, or for
	// tokens whose metadata marks them as sourced from the compute metadata
	// server.
	if !t.skipUniverseDomainValidation && token.MetadataString("auth.google.tokenSource") != "compute-metadata" {
		credentialsUniverseDomain, err := t.creds.UniverseDomain(req.Context())
		if err != nil {
			return nil, err
		}
		if err := transport.ValidateUniverseDomain(t.getClientUniverseDomain(), credentialsUniverseDomain); err != nil {
			return nil, err
		}
	}
	req2 := req.Clone(req.Context())
	SetAuthHeader(token, req2)
	// From here the base RoundTripper owns the request body, so suppress the
	// deferred close above.
	reqBodyClosed = true
	return t.base.RoundTrip(req2)
}
@@ -0,0 +1,107 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package credsfile is meant to hide implementation details from the pubic
+// surface of the detect package. It should not import any other packages in
+// this module. It is located under the main internal package so other
+// sub-packages can use these parsed types as well.
+package credsfile
+
+import (
+ "os"
+ "os/user"
+ "path/filepath"
+ "runtime"
+)
+
+const (
+ // GoogleAppCredsEnvVar is the environment variable for setting the
+ // application default credentials.
+ GoogleAppCredsEnvVar = "GOOGLE_APPLICATION_CREDENTIALS"
+ userCredsFilename = "application_default_credentials.json"
+)
+
+// CredentialType represents different credential filetypes Google credentials
+// can be.
+type CredentialType int
+
+const (
+ // UnknownCredType is an unidentified file type.
+ UnknownCredType CredentialType = iota
+ // UserCredentialsKey represents a user creds file type.
+ UserCredentialsKey
+ // ServiceAccountKey represents a service account file type.
+ ServiceAccountKey
+ // ImpersonatedServiceAccountKey represents a impersonated service account
+ // file type.
+ ImpersonatedServiceAccountKey
+ // ExternalAccountKey represents a external account file type.
+ ExternalAccountKey
+ // GDCHServiceAccountKey represents a GDCH file type.
+ GDCHServiceAccountKey
+ // ExternalAccountAuthorizedUserKey represents a external account authorized
+ // user file type.
+ ExternalAccountAuthorizedUserKey
+)
+
+// parseCredentialType returns the associated filetype based on the parsed
+// typeString provided.
+func parseCredentialType(typeString string) CredentialType {
+ switch typeString {
+ case "service_account":
+ return ServiceAccountKey
+ case "authorized_user":
+ return UserCredentialsKey
+ case "impersonated_service_account":
+ return ImpersonatedServiceAccountKey
+ case "external_account":
+ return ExternalAccountKey
+ case "external_account_authorized_user":
+ return ExternalAccountAuthorizedUserKey
+ case "gdch_service_account":
+ return GDCHServiceAccountKey
+ default:
+ return UnknownCredType
+ }
+}
+
+// GetFileNameFromEnv returns the override if provided or detects a filename
+// from the environment.
+func GetFileNameFromEnv(override string) string {
+ if override != "" {
+ return override
+ }
+ return os.Getenv(GoogleAppCredsEnvVar)
+}
+
+// GetWellKnownFileName tries to locate the filepath for the user credential
+// file based on the environment.
+func GetWellKnownFileName() string {
+ if runtime.GOOS == "windows" {
+ return filepath.Join(os.Getenv("APPDATA"), "gcloud", userCredsFilename)
+ }
+ return filepath.Join(guessUnixHomeDir(), ".config", "gcloud", userCredsFilename)
+}
+
+// guessUnixHomeDir default to checking for HOME, but not all unix systems have
+// this set, do have a fallback.
+func guessUnixHomeDir() string {
+ if v := os.Getenv("HOME"); v != "" {
+ return v
+ }
+ if u, err := user.Current(); err == nil {
+ return u.HomeDir
+ }
+ return ""
+}
@@ -0,0 +1,157 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package credsfile
+
+import (
+ "encoding/json"
+)
+
// Config3LO is the internals of a client creds file.
// ("3LO" presumably refers to three-legged OAuth — TODO confirm.)
type Config3LO struct {
	ClientID     string   `json:"client_id"`
	ClientSecret string   `json:"client_secret"`
	RedirectURIs []string `json:"redirect_uris"`
	AuthURI      string   `json:"auth_uri"`
	TokenURI     string   `json:"token_uri"`
}

// ClientCredentialsFile representation. A Config3LO field is nil when its
// key ("web" or "installed") is absent from the JSON.
type ClientCredentialsFile struct {
	Web            *Config3LO `json:"web"`
	Installed      *Config3LO `json:"installed"`
	UniverseDomain string     `json:"universe_domain"`
}

// ServiceAccountFile representation. PrivateKey, PrivateKeyID, and
// ClientEmail are the fields consumed when minting self-signed JWTs.
type ServiceAccountFile struct {
	Type           string `json:"type"`
	ProjectID      string `json:"project_id"`
	PrivateKeyID   string `json:"private_key_id"`
	PrivateKey     string `json:"private_key"`
	ClientEmail    string `json:"client_email"`
	ClientID       string `json:"client_id"`
	AuthURL        string `json:"auth_uri"`
	TokenURL       string `json:"token_uri"`
	UniverseDomain string `json:"universe_domain"`
}

// UserCredentialsFile representation: an OAuth client plus a refresh token.
// (Presumably produced by gcloud's application-default login — confirm.)
type UserCredentialsFile struct {
	Type           string `json:"type"`
	ClientID       string `json:"client_id"`
	ClientSecret   string `json:"client_secret"`
	QuotaProjectID string `json:"quota_project_id"`
	RefreshToken   string `json:"refresh_token"`
	UniverseDomain string `json:"universe_domain"`
}
+
// ExternalAccountFile representation (JSON "type": "external_account").
// CredentialSource describes where the subject token for the STS exchange
// comes from.
type ExternalAccountFile struct {
	Type                           string                           `json:"type"`
	ClientID                       string                           `json:"client_id"`
	ClientSecret                   string                           `json:"client_secret"`
	Audience                       string                           `json:"audience"`
	SubjectTokenType               string                           `json:"subject_token_type"`
	ServiceAccountImpersonationURL string                           `json:"service_account_impersonation_url"`
	TokenURL                       string                           `json:"token_url"`
	CredentialSource               *CredentialSource                `json:"credential_source,omitempty"`
	TokenInfoURL                   string                           `json:"token_info_url"`
	ServiceAccountImpersonation    *ServiceAccountImpersonationInfo `json:"service_account_impersonation,omitempty"`
	QuotaProjectID                 string                           `json:"quota_project_id"`
	WorkforcePoolUserProject       string                           `json:"workforce_pool_user_project"`
	UniverseDomain                 string                           `json:"universe_domain"`
}

// ExternalAccountAuthorizedUserFile representation
// (JSON "type": "external_account_authorized_user").
type ExternalAccountAuthorizedUserFile struct {
	Type           string `json:"type"`
	Audience       string `json:"audience"`
	ClientID       string `json:"client_id"`
	ClientSecret   string `json:"client_secret"`
	RefreshToken   string `json:"refresh_token"`
	TokenURL       string `json:"token_url"`
	TokenInfoURL   string `json:"token_info_url"`
	RevokeURL      string `json:"revoke_url"`
	QuotaProjectID string `json:"quota_project_id"`
	UniverseDomain string `json:"universe_domain"`
}

// CredentialSource stores the information necessary to retrieve the credentials for the STS exchange.
//
// One field amongst File, URL, Certificate, and Executable should be filled, depending on the kind of credential in question.
// The EnvironmentID should start with AWS if being used for an AWS credential.
type CredentialSource struct {
	File string `json:"file"`
	URL  string `json:"url"`
	// Headers are sent along with the request to URL.
	Headers                     map[string]string  `json:"headers"`
	Executable                  *ExecutableConfig  `json:"executable,omitempty"`
	Certificate                 *CertificateConfig `json:"certificate"`
	EnvironmentID               string             `json:"environment_id"` // TODO: Make type for this
	RegionURL                   string             `json:"region_url"`
	RegionalCredVerificationURL string             `json:"regional_cred_verification_url"`
	CredVerificationURL         string             `json:"cred_verification_url"`
	IMDSv2SessionTokenURL       string             `json:"imdsv2_session_token_url"`
	Format                      *Format            `json:"format,omitempty"`
}
+
// Format describes the format of a [CredentialSource].
type Format struct {
	// Type is either "text" or "json". When not provided "text" type is assumed.
	Type string `json:"type"`
	// SubjectTokenFieldName is only required for JSON format. This would be "access_token" for azure.
	SubjectTokenFieldName string `json:"subject_token_field_name"`
}

// ExecutableConfig represents the command to run for an executable
// [CredentialSource].
type ExecutableConfig struct {
	// Command is the command line to execute.
	Command string `json:"command"`
	// TimeoutMillis is the execution timeout in milliseconds.
	TimeoutMillis int `json:"timeout_millis"`
	// OutputFile is the file the executable writes its result to.
	OutputFile string `json:"output_file"`
}

// CertificateConfig represents the options used to set up X509-based workload
// [CredentialSource].
type CertificateConfig struct {
	UseDefaultCertificateConfig bool   `json:"use_default_certificate_config"`
	CertificateConfigLocation   string `json:"certificate_config_location"`
}

// ServiceAccountImpersonationInfo has impersonation configuration.
type ServiceAccountImpersonationInfo struct {
	// TokenLifetimeSeconds is the requested lifetime of the impersonated
	// token, in seconds.
	TokenLifetimeSeconds int `json:"token_lifetime_seconds"`
}

// ImpersonatedServiceAccountFile representation
// (JSON "type": "impersonated_service_account").
type ImpersonatedServiceAccountFile struct {
	Type                           string   `json:"type"`
	ServiceAccountImpersonationURL string   `json:"service_account_impersonation_url"`
	Delegates                      []string `json:"delegates"`
	// CredSource holds the raw source credentials JSON; its decoding is
	// deferred to the consumer.
	CredSource     json.RawMessage `json:"source_credentials"`
	UniverseDomain string          `json:"universe_domain"`
}

// GDCHServiceAccountFile represents the Google Distributed Cloud Hosted (GDCH) service identity file.
type GDCHServiceAccountFile struct {
	Type           string `json:"type"`
	FormatVersion  string `json:"format_version"`
	Project        string `json:"project"`
	Name           string `json:"name"`
	CertPath       string `json:"ca_cert_path"`
	PrivateKeyID   string `json:"private_key_id"`
	PrivateKey     string `json:"private_key"`
	TokenURL       string `json:"token_uri"`
	UniverseDomain string `json:"universe_domain"`
}
@@ -0,0 +1,98 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package credsfile
+
+import (
+ "encoding/json"
+)
+
+// ParseServiceAccount parses bytes into a [ServiceAccountFile].
+func ParseServiceAccount(b []byte) (*ServiceAccountFile, error) {
+ var f *ServiceAccountFile
+ if err := json.Unmarshal(b, &f); err != nil {
+ return nil, err
+ }
+ return f, nil
+}
+
+// ParseClientCredentials parses bytes into a
+// [credsfile.ClientCredentialsFile].
+func ParseClientCredentials(b []byte) (*ClientCredentialsFile, error) {
+ var f *ClientCredentialsFile
+ if err := json.Unmarshal(b, &f); err != nil {
+ return nil, err
+ }
+ return f, nil
+}
+
+// ParseUserCredentials parses bytes into a [UserCredentialsFile].
+func ParseUserCredentials(b []byte) (*UserCredentialsFile, error) {
+ var f *UserCredentialsFile
+ if err := json.Unmarshal(b, &f); err != nil {
+ return nil, err
+ }
+ return f, nil
+}
+
+// ParseExternalAccount parses bytes into a [ExternalAccountFile].
+func ParseExternalAccount(b []byte) (*ExternalAccountFile, error) {
+ var f *ExternalAccountFile
+ if err := json.Unmarshal(b, &f); err != nil {
+ return nil, err
+ }
+ return f, nil
+}
+
+// ParseExternalAccountAuthorizedUser parses bytes into a
+// [ExternalAccountAuthorizedUserFile].
+func ParseExternalAccountAuthorizedUser(b []byte) (*ExternalAccountAuthorizedUserFile, error) {
+ var f *ExternalAccountAuthorizedUserFile
+ if err := json.Unmarshal(b, &f); err != nil {
+ return nil, err
+ }
+ return f, nil
+}
+
+// ParseImpersonatedServiceAccount parses bytes into a
+// [ImpersonatedServiceAccountFile].
+func ParseImpersonatedServiceAccount(b []byte) (*ImpersonatedServiceAccountFile, error) {
+ var f *ImpersonatedServiceAccountFile
+ if err := json.Unmarshal(b, &f); err != nil {
+ return nil, err
+ }
+ return f, nil
+}
+
+// ParseGDCHServiceAccount parses bytes into a [GDCHServiceAccountFile].
+func ParseGDCHServiceAccount(b []byte) (*GDCHServiceAccountFile, error) {
+ var f *GDCHServiceAccountFile
+ if err := json.Unmarshal(b, &f); err != nil {
+ return nil, err
+ }
+ return f, nil
+}
+
+type fileTypeChecker struct {
+ Type string `json:"type"`
+}
+
+// ParseFileType determines the [CredentialType] based on bytes provided.
+func ParseFileType(b []byte) (CredentialType, error) {
+ var f fileTypeChecker
+ if err := json.Unmarshal(b, &f); err != nil {
+ return 0, err
+ }
+ return parseCredentialType(f.Type), nil
+}
@@ -0,0 +1,219 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package internal
+
+import (
+ "context"
+ "crypto"
+ "crypto/x509"
+ "encoding/json"
+ "encoding/pem"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "sync"
+ "time"
+
+ "cloud.google.com/go/compute/metadata"
+)
+
+const (
+	// TokenTypeBearer is the auth header prefix for bearer tokens.
+	TokenTypeBearer = "Bearer"
+
+	// QuotaProjectEnvVar is the environment variable for setting the quota
+	// project.
+	QuotaProjectEnvVar = "GOOGLE_CLOUD_QUOTA_PROJECT"
+	// UniverseDomainEnvVar is the environment variable for setting the default
+	// service domain for a given Cloud universe.
+	UniverseDomainEnvVar = "GOOGLE_CLOUD_UNIVERSE_DOMAIN"
+	// projectEnvVar is the environment variable consulted by GetProjectID.
+	projectEnvVar = "GOOGLE_CLOUD_PROJECT"
+	// maxBodySize caps how many response-body bytes ReadAll and DoRequest
+	// will consume: 1 MiB.
+	maxBodySize = 1 << 20
+
+	// DefaultUniverseDomain is the default value for universe domain.
+	// Universe domain is the default service domain for a given Cloud universe.
+	DefaultUniverseDomain = "googleapis.com"
+)
+
+// clonableTransport is satisfied by transports (notably *http.Transport)
+// that can produce an independent copy of themselves.
+type clonableTransport interface {
+	Clone() *http.Transport
+}
+
+// DefaultClient returns an [http.Client] with some defaults set. If
+// the current [http.DefaultTransport] is a [clonableTransport], as
+// is the case for an [*http.Transport], the clone will be used.
+// Otherwise the [http.DefaultTransport] is used directly.
+func DefaultClient() *http.Client {
+	transport := http.DefaultTransport
+	if t, ok := transport.(clonableTransport); ok {
+		transport = t.Clone()
+	}
+	return &http.Client{
+		Transport: transport,
+		Timeout:   30 * time.Second,
+	}
+}
+
+// ParseKey converts the binary contents of a private key file to a
+// [crypto.Signer]. It detects whether the private key is in a PEM container
+// or not. If so, it extracts the private key from the PEM container before
+// conversion. It only supports PEM containers with no passphrase.
+// PKCS #8 encoding is tried first, with PKCS #1 as a fallback.
+func ParseKey(key []byte) (crypto.Signer, error) {
+	// If the input is PEM-wrapped, unwrap it to the raw DER payload.
+	if block, _ := pem.Decode(key); block != nil {
+		key = block.Bytes
+	}
+	var parsedKey crypto.PrivateKey
+	var err error
+	parsedKey, err = x509.ParsePKCS8PrivateKey(key)
+	if err != nil {
+		parsedKey, err = x509.ParsePKCS1PrivateKey(key)
+		if err != nil {
+			return nil, fmt.Errorf("private key should be a PEM or plain PKCS1 or PKCS8: %w", err)
+		}
+	}
+	// All stdlib key types (RSA, ECDSA, Ed25519) implement crypto.Signer.
+	signer, ok := parsedKey.(crypto.Signer)
+	if !ok {
+		return nil, errors.New("private key is not a signer")
+	}
+	return signer, nil
+}
+
+// GetQuotaProject retrieves quota project with precedence being: override,
+// environment variable, creds json file.
+func GetQuotaProject(b []byte, override string) string {
+	if override != "" {
+		return override
+	}
+	if env := os.Getenv(QuotaProjectEnvVar); env != "" {
+		return env
+	}
+	if b == nil {
+		return ""
+	}
+	var creds struct {
+		QuotaProject string `json:"quota_project_id"`
+	}
+	// Unparseable JSON degrades to "no quota project".
+	if json.Unmarshal(b, &creds) != nil {
+		return ""
+	}
+	return creds.QuotaProject
+}
+
+// GetProjectID retrieves project with precedence being: override,
+// environment variable, creds json file.
+func GetProjectID(b []byte, override string) string {
+	if override != "" {
+		return override
+	}
+	if env := os.Getenv(projectEnvVar); env != "" {
+		return env
+	}
+	if b == nil {
+		return ""
+	}
+	var creds struct {
+		ProjectID string `json:"project_id"` // standard service account key
+		Project   string `json:"project"`    // gdch key
+	}
+	// Unparseable JSON degrades to "no project".
+	if json.Unmarshal(b, &creds) != nil {
+		return ""
+	}
+	if creds.ProjectID == "" {
+		return creds.Project
+	}
+	return creds.ProjectID
+}
+
+// DoRequest executes the provided req with the client. It reads the response
+// body (up to maxBodySize bytes), closes it, and returns it.
+func DoRequest(client *http.Client, req *http.Request) (*http.Response, []byte, error) {
+	resp, err := client.Do(req)
+	if err != nil {
+		return nil, nil, err
+	}
+	defer resp.Body.Close()
+	// ReadAll already caps the read at maxBodySize, so the extra
+	// io.LimitReader wrapper previously applied here was redundant.
+	body, err := ReadAll(resp.Body)
+	if err != nil {
+		return nil, nil, err
+	}
+	return resp, body, nil
+}
+
+// ReadAll consumes the whole reader and safely reads the content of its body
+// with some overflow protection (reads are capped at maxBodySize bytes).
+func ReadAll(r io.Reader) ([]byte, error) {
+	limited := io.LimitReader(r, maxBodySize)
+	return io.ReadAll(limited)
+}
+
+// StaticCredentialsProperty is a helper for creating static credentials
+// properties.
+func StaticCredentialsProperty(s string) StaticProperty {
+	return StaticProperty(s)
+}
+
+// StaticProperty always returns that value of the underlying string.
+type StaticProperty string
+
+// GetProperty returns the wrapped string; it never fails and ignores the
+// context.
+func (p StaticProperty) GetProperty(_ context.Context) (string, error) {
+	return string(p), nil
+}
+
+// ComputeUniverseDomainProvider fetches the credentials universe domain from
+// the google cloud metadata service.
+type ComputeUniverseDomainProvider struct {
+	// MetadataClient is the client used to query the metadata server.
+	MetadataClient *metadata.Client
+	// universeDomainOnce guards the lookup so the metadata server is
+	// queried at most once per provider instance; universeDomain and
+	// universeDomainErr cache its result.
+	universeDomainOnce sync.Once
+	universeDomain     string
+	universeDomainErr  error
+}
+
+// GetProperty fetches the credentials universe domain from the google cloud
+// metadata service. The result (value or error) is computed once and reused
+// on every subsequent call.
+func (c *ComputeUniverseDomainProvider) GetProperty(ctx context.Context) (string, error) {
+	// NOTE(review): only the first caller's ctx is used for the actual
+	// lookup; later callers receive the cached result regardless of ctx.
+	c.universeDomainOnce.Do(func() {
+		c.universeDomain, c.universeDomainErr = getMetadataUniverseDomain(ctx, c.MetadataClient)
+	})
+	if c.universeDomainErr != nil {
+		return "", c.universeDomainErr
+	}
+	return c.universeDomain, nil
+}
+
+// httpGetMetadataUniverseDomain is a package var for unit test substitution.
+var httpGetMetadataUniverseDomain = func(ctx context.Context, client *metadata.Client) (string, error) {
+	ctx, cancel := context.WithTimeout(ctx, 1*time.Second)
+	defer cancel()
+	return client.GetWithContext(ctx, "universe/universe-domain")
+}
+
+// getMetadataUniverseDomain queries the metadata server for the universe
+// domain, mapping a 404 ("not defined") to DefaultUniverseDomain.
+func getMetadataUniverseDomain(ctx context.Context, client *metadata.Client) (string, error) {
+	universeDomain, err := httpGetMetadataUniverseDomain(ctx, client)
+	if err != nil {
+		if _, ok := err.(metadata.NotDefinedError); ok {
+			// http.StatusNotFound (404)
+			return DefaultUniverseDomain, nil
+		}
+		return "", err
+	}
+	return universeDomain, nil
+}
@@ -0,0 +1,171 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package jwt
+
+import (
+ "bytes"
+ "crypto"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/sha256"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "strings"
+ "time"
+)
+
+const (
+	// HeaderAlgRSA256 is the RS256 [Header.Algorithm].
+	HeaderAlgRSA256 = "RS256"
+	// HeaderAlgES256 is the ES256 [Header.Algorithm].
+	HeaderAlgES256 = "ES256"
+	// HeaderType is the standard [Header.Type].
+	HeaderType = "JWT"
+)
+
+// Header represents a JWT header.
+type Header struct {
+	Algorithm string `json:"alg"`
+	Type      string `json:"typ"`
+	KeyID     string `json:"kid"`
+}
+
+// encode serializes the header as JSON and returns it base64url-encoded
+// without padding, per the JWS compact serialization.
+func (h *Header) encode() (string, error) {
+	raw, err := json.Marshal(h)
+	if err != nil {
+		return "", err
+	}
+	return base64.RawURLEncoding.EncodeToString(raw), nil
+}
+
+// Claims represents the claims set of a JWT.
+type Claims struct {
+	// Iss is the issuer JWT claim.
+	Iss string `json:"iss"`
+	// Scope is the scope JWT claim.
+	Scope string `json:"scope,omitempty"`
+	// Exp is the expiry JWT claim. If unset, default is in one hour from now.
+	Exp int64 `json:"exp"`
+	// Iat is the subject issued at claim. If unset, default is now.
+	Iat int64 `json:"iat"`
+	// Aud is the audience JWT claim. Optional.
+	Aud string `json:"aud"`
+	// Sub is the subject JWT claim. Optional.
+	Sub string `json:"sub,omitempty"`
+	// AdditionalClaims contains any additional non-standard JWT claims. Optional.
+	// The json:"-" tag excludes it from standard marshaling; Claims.encode
+	// splices these claims into the JSON object manually.
+	AdditionalClaims map[string]interface{} `json:"-"`
+}
+
+// encode returns the base64url-encoded JSON for the claim set. It fills in
+// defaults for Iat (now, minus 10s of skew) and Exp (one hour later), then
+// splices AdditionalClaims into the same top-level JSON object.
+//
+// Note: encode mutates the receiver by persisting the defaulted Iat/Exp.
+func (c *Claims) encode() (string, error) {
+	// Compensate for skew
+	now := time.Now().Add(-10 * time.Second)
+	if c.Iat == 0 {
+		c.Iat = now.Unix()
+	}
+	if c.Exp == 0 {
+		c.Exp = now.Add(time.Hour).Unix()
+	}
+	if c.Exp < c.Iat {
+		return "", fmt.Errorf("jwt: invalid Exp = %d; must be later than Iat = %d", c.Exp, c.Iat)
+	}
+
+	b, err := json.Marshal(c)
+	if err != nil {
+		return "", err
+	}
+
+	if len(c.AdditionalClaims) == 0 {
+		return base64.RawURLEncoding.EncodeToString(b), nil
+	}
+
+	// Marshal private claim set and then append it to b.
+	prv, err := json.Marshal(c.AdditionalClaims)
+	if err != nil {
+		return "", fmt.Errorf("invalid map of additional claims %v: %w", c.AdditionalClaims, err)
+	}
+
+	// Concatenate public and private claim JSON objects.
+	// Both marshaled values must be JSON objects for the splice to be valid.
+	if !bytes.HasSuffix(b, []byte{'}'}) {
+		return "", fmt.Errorf("invalid JSON %s", b)
+	}
+	if !bytes.HasPrefix(prv, []byte{'{'}) {
+		return "", fmt.Errorf("invalid JSON %s", prv)
+	}
+	b[len(b)-1] = ',' // Replace closing curly brace with a comma.
+	b = append(b, prv[1:]...) // Append private claims.
+	return base64.RawURLEncoding.EncodeToString(b), nil
+}
+
+// EncodeJWS encodes the data using the provided key as a JSON web signature:
+// base64url(header).base64url(claims).base64url(signature), where the
+// signature covers the SHA-256 digest of the first two segments.
+func EncodeJWS(header *Header, c *Claims, signer crypto.Signer) (string, error) {
+	head, err := header.encode()
+	if err != nil {
+		return "", err
+	}
+	claims, err := c.encode()
+	if err != nil {
+		return "", err
+	}
+	signingInput := head + "." + claims
+	digest := sha256.Sum256([]byte(signingInput))
+	sig, err := signer.Sign(rand.Reader, digest[:], crypto.SHA256)
+	if err != nil {
+		return "", err
+	}
+	return signingInput + "." + base64.RawURLEncoding.EncodeToString(sig), nil
+}
+
+// DecodeJWS decodes a claim set from a JWS payload. Only the claims segment
+// (the second dot-separated part) is decoded; the signature is NOT verified
+// here — use VerifyJWS for that.
+func DecodeJWS(payload string) (*Claims, error) {
+	// decode returned id token to get expiry
+	s := strings.Split(payload, ".")
+	if len(s) < 2 {
+		return nil, errors.New("invalid token received")
+	}
+	decoded, err := base64.RawURLEncoding.DecodeString(s[1])
+	if err != nil {
+		return nil, err
+	}
+	c := &Claims{}
+	if err := json.Unmarshal(decoded, c); err != nil {
+		return nil, err
+	}
+	// Decode a second time into the catch-all map so callers can also read
+	// non-standard claims.
+	if err := json.Unmarshal(decoded, &c.AdditionalClaims); err != nil {
+		return nil, err
+	}
+	// Previously `return c, err` — err is always nil here; return nil
+	// explicitly to avoid implying otherwise.
+	return c, nil
+}
+
+// VerifyJWS tests whether the provided JWT token's signature was produced by
+// the private key associated with the provided public key.
+func VerifyJWS(token string, key *rsa.PublicKey) error {
+	parts := strings.Split(token, ".")
+	if len(parts) != 3 {
+		return errors.New("jwt: invalid token received, token must have 3 parts")
+	}
+	sig, err := base64.RawURLEncoding.DecodeString(parts[2])
+	if err != nil {
+		return err
+	}
+	// The signature covers the SHA-256 digest of "header.claims".
+	digest := sha256.Sum256([]byte(parts[0] + "." + parts[1]))
+	return rsa.VerifyPKCS1v15(key, crypto.SHA256, digest[:], sig)
+}
@@ -0,0 +1,368 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package transport
+
+import (
+ "context"
+ "crypto/tls"
+ "crypto/x509"
+ "errors"
+ "log"
+ "log/slog"
+ "net"
+ "net/http"
+ "net/url"
+ "os"
+ "strconv"
+ "strings"
+
+ "cloud.google.com/go/auth/internal"
+ "cloud.google.com/go/auth/internal/transport/cert"
+ "github.com/google/s2a-go"
+ "github.com/google/s2a-go/fallback"
+ "google.golang.org/grpc/credentials"
+)
+
+const (
+	mTLSModeAlways = "always"
+	mTLSModeNever  = "never"
+	mTLSModeAuto   = "auto"
+
+	// Experimental: if true, the code will try MTLS with S2A as the default for transport security. Default value is false.
+	googleAPIUseS2AEnv     = "EXPERIMENTAL_GOOGLE_API_USE_S2A"
+	googleAPIUseCertSource = "GOOGLE_API_USE_CLIENT_CERTIFICATE"
+	googleAPIUseMTLS       = "GOOGLE_API_USE_MTLS_ENDPOINT"
+	googleAPIUseMTLSOld    = "GOOGLE_API_USE_MTLS"
+
+	// universeDomainPlaceholder is the token replaced by the effective
+	// universe domain in endpoint templates.
+	universeDomainPlaceholder = "UNIVERSE_DOMAIN"
+
+	// Well-known on-disk locations of the mTLS MDS root certificate and
+	// client credentials.
+	mtlsMDSRoot = "/run/google-mds-mtls/root.crt"
+	mtlsMDSKey  = "/run/google-mds-mtls/client.key"
+)
+
+// Options is a struct that is duplicated information from the individual
+// transport packages in order to avoid cyclic deps. It correlates 1:1 with
+// fields on httptransport.Options and grpctransport.Options.
+type Options struct {
+	// Endpoint is a user-provided host[:port] or full-URL override.
+	Endpoint string
+	// DefaultEndpointTemplate contains the UNIVERSE_DOMAIN placeholder.
+	DefaultEndpointTemplate string
+	// DefaultMTLSEndpoint may also contain the UNIVERSE_DOMAIN placeholder.
+	DefaultMTLSEndpoint string
+	// ClientCertProvider is a user-provided client certificate source.
+	ClientCertProvider cert.Provider
+	Client             *http.Client
+	// UniverseDomain overrides the default googleapis.com universe.
+	UniverseDomain string
+	// EnableDirectPath / EnableDirectPathXds — assumed to gate gRPC
+	// DirectPath; not read in this file. TODO(review): confirm at callers.
+	EnableDirectPath    bool
+	EnableDirectPathXds bool
+	Logger              *slog.Logger
+}
+
+// getUniverseDomain returns the default service domain for a given Cloud
+// universe, falling back to googleapis.com when unset.
+func (o *Options) getUniverseDomain() string {
+	if o.UniverseDomain != "" {
+		return o.UniverseDomain
+	}
+	return internal.DefaultUniverseDomain
+}
+
+// isUniverseDomainGDU returns true if the universe domain is the default Google
+// universe (googleapis.com).
+func (o *Options) isUniverseDomainGDU() bool {
+	return o.getUniverseDomain() == internal.DefaultUniverseDomain
+}
+
+// defaultEndpoint expands DefaultEndpointTemplate with the effective
+// universe domain; it returns an empty string when no template is set.
+func (o *Options) defaultEndpoint() string {
+	tmpl := o.DefaultEndpointTemplate
+	if tmpl == "" {
+		return ""
+	}
+	return strings.Replace(tmpl, universeDomainPlaceholder, o.getUniverseDomain(), 1)
+}
+
+// defaultMTLSEndpoint returns the DefaultMTLSEndpoint merged with the
+// universe domain if DefaultMTLSEndpoint is set, otherwise returns an
+// empty string.
+func (o *Options) defaultMTLSEndpoint() string {
+	if o.DefaultMTLSEndpoint == "" {
+		return ""
+	}
+	return strings.Replace(o.DefaultMTLSEndpoint, universeDomainPlaceholder, o.getUniverseDomain(), 1)
+}
+
+// mergedEndpoint merges a user-provided Endpoint of format host[:port] with
+// the default endpoint by substituting the default's host portion.
+func (o *Options) mergedEndpoint() (string, error) {
+	base := o.defaultEndpoint()
+	u, err := url.Parse(fixScheme(base))
+	if err != nil {
+		return "", err
+	}
+	return strings.Replace(base, u.Host, o.Endpoint, 1), nil
+}
+
+// fixScheme prepends "https://" when baseURL carries no scheme.
+func fixScheme(baseURL string) string {
+	if strings.Contains(baseURL, "://") {
+		return baseURL
+	}
+	return "https://" + baseURL
+}
+
+// GetGRPCTransportCredsAndEndpoint returns an instance of
+// [google.golang.org/grpc/credentials.TransportCredentials], and the
+// corresponding endpoint to use for GRPC client.
+//
+// Selection order: mTLS S2A (if its MDS credentials load), then plaintext
+// S2A, then plain TLS with the configured client certificate source.
+func GetGRPCTransportCredsAndEndpoint(opts *Options) (credentials.TransportCredentials, string, error) {
+	config, err := getTransportConfig(opts)
+	if err != nil {
+		return nil, "", err
+	}
+
+	// Non-S2A path: regular TLS with the resolved client cert source.
+	defaultTransportCreds := credentials.NewTLS(&tls.Config{
+		GetClientCertificate: config.clientCertSource,
+	})
+
+	var s2aAddr string
+	var transportCredsForS2A credentials.TransportCredentials
+
+	if config.mtlsS2AAddress != "" {
+		s2aAddr = config.mtlsS2AAddress
+		transportCredsForS2A, err = loadMTLSMDSTransportCreds(mtlsMDSRoot, mtlsMDSKey)
+		if err != nil {
+			// mTLS S2A unusable: degrade to plaintext S2A if available,
+			// otherwise to the default TLS credentials.
+			log.Printf("Loading MTLS MDS credentials failed: %v", err)
+			if config.s2aAddress != "" {
+				s2aAddr = config.s2aAddress
+			} else {
+				return defaultTransportCreds, config.endpoint, nil
+			}
+		}
+	} else if config.s2aAddress != "" {
+		s2aAddr = config.s2aAddress
+	} else {
+		return defaultTransportCreds, config.endpoint, nil
+	}
+
+	var fallbackOpts *s2a.FallbackOptions
+	// In case of S2A failure, fall back to the endpoint that would've been used without S2A.
+	if fallbackHandshake, err := fallback.DefaultFallbackClientHandshakeFunc(config.endpoint); err == nil {
+		fallbackOpts = &s2a.FallbackOptions{
+			FallbackClientHandshakeFunc: fallbackHandshake,
+		}
+	}
+
+	s2aTransportCreds, err := s2a.NewClientCreds(&s2a.ClientOptions{
+		S2AAddress:     s2aAddr,
+		TransportCreds: transportCredsForS2A,
+		FallbackOpts:   fallbackOpts,
+	})
+	if err != nil {
+		// Use default if we cannot initialize S2A client transport credentials.
+		return defaultTransportCreds, config.endpoint, nil
+	}
+	return s2aTransportCreds, config.s2aMTLSEndpoint, nil
+}
+
+// GetHTTPTransportConfig returns a client certificate source and a function for
+// dialing MTLS with S2A.
+//
+// When S2A is not used, the cert source is returned with a nil dial function;
+// when S2A is used, a nil cert source is returned with a non-nil dial
+// function. Selection order mirrors GetGRPCTransportCredsAndEndpoint:
+// mTLS S2A, then plaintext S2A, then no S2A.
+func GetHTTPTransportConfig(opts *Options) (cert.Provider, func(context.Context, string, string) (net.Conn, error), error) {
+	config, err := getTransportConfig(opts)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	var s2aAddr string
+	var transportCredsForS2A credentials.TransportCredentials
+
+	if config.mtlsS2AAddress != "" {
+		s2aAddr = config.mtlsS2AAddress
+		transportCredsForS2A, err = loadMTLSMDSTransportCreds(mtlsMDSRoot, mtlsMDSKey)
+		if err != nil {
+			// mTLS S2A unusable: degrade to plaintext S2A if available,
+			// otherwise skip S2A entirely.
+			log.Printf("Loading MTLS MDS credentials failed: %v", err)
+			if config.s2aAddress != "" {
+				s2aAddr = config.s2aAddress
+			} else {
+				return config.clientCertSource, nil, nil
+			}
+		}
+	} else if config.s2aAddress != "" {
+		s2aAddr = config.s2aAddress
+	} else {
+		return config.clientCertSource, nil, nil
+	}
+
+	var fallbackOpts *s2a.FallbackOptions
+	// In case of S2A failure, fall back to the endpoint that would've been used without S2A.
+	if fallbackURL, err := url.Parse(config.endpoint); err == nil {
+		if fallbackDialer, fallbackServerAddr, err := fallback.DefaultFallbackDialerAndAddress(fallbackURL.Hostname()); err == nil {
+			fallbackOpts = &s2a.FallbackOptions{
+				FallbackDialer: &s2a.FallbackDialer{
+					Dialer:     fallbackDialer,
+					ServerAddr: fallbackServerAddr,
+				},
+			}
+		}
+	}
+
+	dialTLSContextFunc := s2a.NewS2ADialTLSContextFunc(&s2a.ClientOptions{
+		S2AAddress:     s2aAddr,
+		TransportCreds: transportCredsForS2A,
+		FallbackOpts:   fallbackOpts,
+	})
+	return nil, dialTLSContextFunc, nil
+}
+
+// loadMTLSMDSTransportCreds builds gRPC transport credentials for the mTLS
+// metadata server from the given root CA file and combined cert+key file,
+// requiring TLS 1.3.
+func loadMTLSMDSTransportCreds(mtlsMDSRootFile, mtlsMDSKeyFile string) (credentials.TransportCredentials, error) {
+	rootPEM, err := os.ReadFile(mtlsMDSRootFile)
+	if err != nil {
+		return nil, err
+	}
+	caCertPool := x509.NewCertPool()
+	ok := caCertPool.AppendCertsFromPEM(rootPEM)
+	if !ok {
+		return nil, errors.New("failed to load MTLS MDS root certificate")
+	}
+	// The mTLS MDS credentials are formatted as the concatenation of a PEM-encoded certificate chain
+	// followed by a PEM-encoded private key. For this reason, the concatenation is passed in to the
+	// tls.X509KeyPair function as both the certificate chain and private key arguments.
+	cert, err := tls.LoadX509KeyPair(mtlsMDSKeyFile, mtlsMDSKeyFile)
+	if err != nil {
+		return nil, err
+	}
+	tlsConfig := tls.Config{
+		RootCAs:      caCertPool,
+		Certificates: []tls.Certificate{cert},
+		MinVersion:   tls.VersionTLS13,
+	}
+	return credentials.NewTLS(&tlsConfig), nil
+}
+
+// getTransportConfig resolves the client certificate source, endpoint, and —
+// when S2A applies — the S2A addresses and mTLS endpoint for the options.
+func getTransportConfig(opts *Options) (*transportConfig, error) {
+	clientCertSource, err := GetClientCertificateProvider(opts)
+	if err != nil {
+		return nil, err
+	}
+	endpoint, err := getEndpoint(opts, clientCertSource)
+	if err != nil {
+		return nil, err
+	}
+	defaultTransportConfig := transportConfig{
+		clientCertSource: clientCertSource,
+		endpoint:         endpoint,
+	}
+
+	// shouldUseS2A (defined elsewhere in this package) gates the S2A path.
+	if !shouldUseS2A(clientCertSource, opts) {
+		return &defaultTransportConfig, nil
+	}
+
+	// S2A addresses are discovered via GetS2AAddress/GetMTLSS2AAddress (not
+	// shown here); with neither available, fall back to the default config.
+	s2aAddress := GetS2AAddress(opts.Logger)
+	mtlsS2AAddress := GetMTLSS2AAddress(opts.Logger)
+	if s2aAddress == "" && mtlsS2AAddress == "" {
+		return &defaultTransportConfig, nil
+	}
+	return &transportConfig{
+		clientCertSource: clientCertSource,
+		endpoint:         endpoint,
+		s2aAddress:       s2aAddress,
+		mtlsS2AAddress:   mtlsS2AAddress,
+		s2aMTLSEndpoint:  opts.defaultMTLSEndpoint(),
+	}, nil
+}
+
+// GetClientCertificateProvider returns a default client certificate source, if
+// not provided by the user.
+//
+// A nil default source can be returned if the source does not exist. Any exceptions
+// encountered while initializing the default source will be reported as client
+// error (ex. corrupt metadata file).
+func GetClientCertificateProvider(opts *Options) (cert.Provider, error) {
+	// Guard clauses instead of if/else-if; also drops the stray blank line
+	// before the closing brace that made the original non-gofmt-clean.
+	if !isClientCertificateEnabled(opts) {
+		return nil, nil
+	}
+	if opts.ClientCertProvider != nil {
+		return opts.ClientCertProvider, nil
+	}
+	return cert.DefaultProvider()
+}
+
+// isClientCertificateEnabled returns true by default for all GDU universe domain, unless explicitly overridden by env var
+func isClientCertificateEnabled(opts *Options) bool {
+	value, ok := os.LookupEnv(googleAPIUseCertSource)
+	if !ok {
+		return opts.isUniverseDomainGDU()
+	}
+	// A parse failure deliberately counts as false.
+	enabled, _ := strconv.ParseBool(value)
+	return enabled
+}
+
+// transportConfig is the resolved transport configuration produced by
+// getTransportConfig.
+type transportConfig struct {
+	// The client certificate source.
+	clientCertSource cert.Provider
+	// The corresponding endpoint to use based on client certificate source.
+	endpoint string
+	// The plaintext S2A address if it can be used, otherwise an empty string.
+	s2aAddress string
+	// The MTLS S2A address if it can be used, otherwise an empty string.
+	mtlsS2AAddress string
+	// The MTLS endpoint to use with S2A.
+	s2aMTLSEndpoint string
+}
+
+// getEndpoint returns the endpoint for the service, taking into account the
+// user-provided endpoint override "settings.Endpoint".
+//
+// If no endpoint override is specified, we will either return the default
+// endpoint or the default mTLS endpoint if a client certificate is available.
+//
+// You can override the default endpoint choice (mTLS vs. regular) by setting
+// the GOOGLE_API_USE_MTLS_ENDPOINT environment variable.
+//
+// If the endpoint override is an address (host:port) rather than full base
+// URL (ex. https://...), then the user-provided address will be merged into
+// the default endpoint. For example, WithEndpoint("myhost:8000") and
+// DefaultEndpointTemplate("https://UNIVERSE_DOMAIN/bar/baz") will return
+// "https://myhost:8000/bar/baz". Note that this does not apply to the mTLS
+// endpoint.
+func getEndpoint(opts *Options, clientCertSource cert.Provider) (string, error) {
+	if opts.Endpoint == "" {
+		mtlsMode := getMTLSMode()
+		// mTLS wins when forced, or in auto mode with a cert available.
+		if mtlsMode == mTLSModeAlways || (clientCertSource != nil && mtlsMode == mTLSModeAuto) {
+			return opts.defaultMTLSEndpoint(), nil
+		}
+		return opts.defaultEndpoint(), nil
+	}
+	if strings.Contains(opts.Endpoint, "://") {
+		// User passed in a full URL path, use it verbatim.
+		return opts.Endpoint, nil
+	}
+	if opts.defaultEndpoint() == "" {
+		// If DefaultEndpointTemplate is not configured,
+		// use the user provided endpoint verbatim. This allows a naked
+		// "host[:port]" URL to be used with GRPC Direct Path.
+		return opts.Endpoint, nil
+	}
+
+	// Assume user-provided endpoint is host[:port], merge it with the default endpoint.
+	return opts.mergedEndpoint()
+}
+
+// getMTLSMode reads the mTLS endpoint mode from the environment, consulting
+// the deprecated variable as a fallback and defaulting to "auto".
+func getMTLSMode() string {
+	for _, envVar := range []string{googleAPIUseMTLS, googleAPIUseMTLSOld} {
+		if mode := os.Getenv(envVar); mode != "" {
+			return strings.ToLower(mode)
+		}
+	}
+	return mTLSModeAuto
+}
@@ -0,0 +1,65 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cert
+
+import (
+ "crypto/tls"
+ "errors"
+ "sync"
+)
+
+// defaultCertData holds all the variables pertaining to
+// the default certificate provider created by [DefaultProvider].
+//
+// A singleton model is used to allow the provider to be reused
+// by the transport layer. As mentioned in [DefaultProvider] (provider nil, nil)
+// may be returned to indicate a default provider could not be found, which
+// will skip extra tls config in the transport layer .
+type defaultCertData struct {
+	once     sync.Once
+	provider Provider
+	err      error
+}
+
+var (
+	defaultCert defaultCertData
+)
+
+// Provider is a function that can be passed into crypto/tls.Config.GetClientCertificate.
+type Provider func(*tls.CertificateRequestInfo) (*tls.Certificate, error)
+
+// errSourceUnavailable is a sentinel error to indicate certificate source is unavailable.
+var errSourceUnavailable = errors.New("certificate source is unavailable")
+
+// DefaultProvider returns a certificate source, trying in order: the workload
+// X.509 source, then the EnterpriseCertificateProxySource, then the legacy
+// SecureConnectSource. (The previous doc claimed ECP was tried first; the
+// code below tries the workload source first.)
+//
+// If no source is available (due to missing configurations), a nil Source and a nil Error are
+// returned to indicate that a default certificate source is unavailable.
+func DefaultProvider() (Provider, error) {
+	defaultCert.once.Do(func() {
+		defaultCert.provider, defaultCert.err = NewWorkloadX509CertProvider("")
+		if errors.Is(defaultCert.err, errSourceUnavailable) {
+			defaultCert.provider, defaultCert.err = NewEnterpriseCertificateProxyProvider("")
+			if errors.Is(defaultCert.err, errSourceUnavailable) {
+				defaultCert.provider, defaultCert.err = NewSecureConnectProvider("")
+				if errors.Is(defaultCert.err, errSourceUnavailable) {
+					// All sources unavailable: report (nil, nil), not an error.
+					defaultCert.provider, defaultCert.err = nil, nil
+				}
+			}
+		}
+	})
+	return defaultCert.provider, defaultCert.err
+}
@@ -0,0 +1,54 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cert
+
+import (
+ "crypto/tls"
+
+ "github.com/googleapis/enterprise-certificate-proxy/client"
+)
+
+// ecpSource wraps an Enterprise Certificate Proxy key handle.
+type ecpSource struct {
+	key *client.Key
+}
+
+// NewEnterpriseCertificateProxyProvider creates a certificate source
+// using the Enterprise Certificate Proxy client, which delegates
+// certificate related operations to an OS-specific "signer binary"
+// that communicates with the native keystore (ex. keychain on MacOS).
+//
+// The configFilePath points to a config file containing relevant parameters
+// such as the certificate issuer and the location of the signer binary.
+// If configFilePath is empty, the client will attempt to load the config from
+// a well-known gcloud location.
+func NewEnterpriseCertificateProxyProvider(configFilePath string) (Provider, error) {
+	key, err := client.Cred(configFilePath)
+	if err != nil {
+		// TODO(codyoss): once this is fixed upstream can handle this error a
+		// little better here. But be safe for now and assume unavailable.
+		return nil, errSourceUnavailable
+	}
+
+	return (&ecpSource{
+		key: key,
+	}).getClientCertificate, nil
+}
+
+// getClientCertificate satisfies [Provider]. The key itself acts as the
+// private key (signing is delegated to the proxy's signer binary).
+func (s *ecpSource) getClientCertificate(info *tls.CertificateRequestInfo) (*tls.Certificate, error) {
+	var cert tls.Certificate
+	cert.PrivateKey = s.key
+	cert.Certificate = s.key.CertificateChain()
+	return &cert, nil
+}
@@ -0,0 +1,124 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cert
+
+import (
+ "crypto/tls"
+ "crypto/x509"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "os"
+ "os/exec"
+ "os/user"
+ "path/filepath"
+ "sync"
+ "time"
+)
+
+const (
+	// metadataPath and metadataFile locate the default context aware metadata
+	// file under the user's home directory.
+	metadataPath = ".secureConnect"
+	metadataFile = "context_aware_metadata.json"
+)
+
+// secureConnectSource yields client certificates by executing the configured
+// Secure Connect helper command.
+type secureConnectSource struct {
+	metadata secureConnectMetadata
+
+	// Cache the cert to avoid executing helper command repeatedly.
+	cachedCertMutex sync.Mutex
+	cachedCert      *tls.Certificate
+}
+
+// secureConnectMetadata mirrors the metadata JSON file; Cmd is the argv of
+// the helper command to run.
+type secureConnectMetadata struct {
+	Cmd []string `json:"cert_provider_command"`
+}
+
+// NewSecureConnectProvider creates a certificate source using
+// the Secure Connect Helper and its associated metadata file.
+//
+// The configFilePath points to the location of the context aware metadata file.
+// If configFilePath is empty, use the default context aware metadata location.
+func NewSecureConnectProvider(configFilePath string) (Provider, error) {
+	if configFilePath == "" {
+		u, err := user.Current()
+		if err != nil {
+			// Error locating the default config means Secure Connect is not supported.
+			return nil, errSourceUnavailable
+		}
+		configFilePath = filepath.Join(u.HomeDir, metadataPath, metadataFile)
+	}
+
+	raw, err := os.ReadFile(configFilePath)
+	if err != nil {
+		// Config file missing means Secure Connect is not supported.
+		// There are non-os.ErrNotExist errors that may be returned.
+		// (e.g. if the home directory is /dev/null, *nix systems will
+		// return ENOTDIR instead of ENOENT)
+		return nil, errSourceUnavailable
+	}
+
+	var md secureConnectMetadata
+	if err := json.Unmarshal(raw, &md); err != nil {
+		return nil, fmt.Errorf("cert: could not parse JSON in %q: %w", configFilePath, err)
+	}
+	if err := validateMetadata(md); err != nil {
+		return nil, fmt.Errorf("cert: invalid config in %q: %w", configFilePath, err)
+	}
+	source := &secureConnectSource{metadata: md}
+	return source.getClientCertificate, nil
+}
+
+// validateMetadata reports an error when no cert provider command is
+// configured.
+func validateMetadata(metadata secureConnectMetadata) error {
+	if len(metadata.Cmd) > 0 {
+		return nil
+	}
+	return errors.New("empty cert_provider_command")
+}
+
+// getClientCertificate returns the cached certificate while it is still
+// valid; otherwise it runs the configured helper command and parses its
+// output as a concatenated PEM certificate+key pair. Concurrent calls are
+// serialized by cachedCertMutex.
+func (s *secureConnectSource) getClientCertificate(info *tls.CertificateRequestInfo) (*tls.Certificate, error) {
+	s.cachedCertMutex.Lock()
+	defer s.cachedCertMutex.Unlock()
+	if s.cachedCert != nil && !isCertificateExpired(s.cachedCert) {
+		return s.cachedCert, nil
+	}
+	// Expand OS environment variables in the cert provider command such as "$HOME".
+	// NOTE(review): this mutates s.metadata.Cmd in place, so on each refresh
+	// the expansion is re-applied to already-expanded values.
+	for i := 0; i < len(s.metadata.Cmd); i++ {
+		s.metadata.Cmd[i] = os.ExpandEnv(s.metadata.Cmd[i])
+	}
+	command := s.metadata.Cmd
+	data, err := exec.Command(command[0], command[1:]...).Output()
+	if err != nil {
+		return nil, err
+	}
+	// The helper's stdout carries both the cert chain and the private key.
+	cert, err := tls.X509KeyPair(data, data)
+	if err != nil {
+		return nil, err
+	}
+	s.cachedCert = &cert
+	return &cert, nil
+}
+
+// isCertificateExpired returns true if the given cert is expired or invalid.
+func isCertificateExpired(cert *tls.Certificate) bool {
+	if len(cert.Certificate) == 0 {
+		return true
+	}
+	leaf, err := x509.ParseCertificate(cert.Certificate[0])
+	if err != nil {
+		// Unparseable certs are treated as expired.
+		return true
+	}
+	return leaf.NotAfter.Before(time.Now())
+}
@@ -0,0 +1,114 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package cert
+
+import (
+ "crypto/tls"
+ "encoding/json"
+ "errors"
+ "io"
+ "os"
+
+ "github.com/googleapis/enterprise-certificate-proxy/client/util"
+)
+
// certConfigs models the "cert_configs" object of the on-disk certificate
// configuration file.
type certConfigs struct {
	Workload *workloadSource `json:"workload"`
}

// workloadSource holds the file locations of a workload certificate and its
// private key; it also serves as the tls.Certificate provider (see
// getClientCertificate).
type workloadSource struct {
	CertPath string `json:"cert_path"`
	KeyPath  string `json:"key_path"`
}

// certificateConfig is the top-level shape of the certificate configuration
// file read by getCertAndKeyFiles.
type certificateConfig struct {
	CertConfigs certConfigs `json:"cert_configs"`
}
+
+// NewWorkloadX509CertProvider creates a certificate source
+// that reads a certificate and private key file from the local file system.
+// This is intended to be used for workload identity federation.
+//
+// The configFilePath points to a config file containing relevant parameters
+// such as the certificate and key file paths.
+// If configFilePath is empty, the client will attempt to load the config from
+// a well-known gcloud location.
+func NewWorkloadX509CertProvider(configFilePath string) (Provider, error) {
+ if configFilePath == "" {
+ envFilePath := util.GetConfigFilePathFromEnv()
+ if envFilePath != "" {
+ configFilePath = envFilePath
+ } else {
+ configFilePath = util.GetDefaultConfigFilePath()
+ }
+ }
+
+ certFile, keyFile, err := getCertAndKeyFiles(configFilePath)
+ if err != nil {
+ return nil, err
+ }
+
+ source := &workloadSource{
+ CertPath: certFile,
+ KeyPath: keyFile,
+ }
+ return source.getClientCertificate, nil
+}
+
+// getClientCertificate attempts to load the certificate and key from the files specified in the
+// certificate config.
+func (s *workloadSource) getClientCertificate(info *tls.CertificateRequestInfo) (*tls.Certificate, error) {
+ cert, err := tls.LoadX509KeyPair(s.CertPath, s.KeyPath)
+ if err != nil {
+ return nil, err
+ }
+ return &cert, nil
+}
+
+// getCertAndKeyFiles attempts to read the provided config file and return the certificate and private
+// key file paths.
+func getCertAndKeyFiles(configFilePath string) (string, string, error) {
+ jsonFile, err := os.Open(configFilePath)
+ if err != nil {
+ return "", "", errSourceUnavailable
+ }
+
+ byteValue, err := io.ReadAll(jsonFile)
+ if err != nil {
+ return "", "", err
+ }
+
+ var config certificateConfig
+ if err := json.Unmarshal(byteValue, &config); err != nil {
+ return "", "", err
+ }
+
+ if config.CertConfigs.Workload == nil {
+ return "", "", errSourceUnavailable
+ }
+
+ certFile := config.CertConfigs.Workload.CertPath
+ keyFile := config.CertConfigs.Workload.KeyPath
+
+ if certFile == "" {
+ return "", "", errors.New("certificate configuration is missing the certificate file location")
+ }
+
+ if keyFile == "" {
+ return "", "", errors.New("certificate configuration is missing the key file location")
+ }
+
+ return certFile, keyFile, nil
+}
@@ -0,0 +1,138 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package transport
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "log"
+ "log/slog"
+ "os"
+ "strconv"
+ "sync"
+
+ "cloud.google.com/go/auth/internal/transport/cert"
+ "cloud.google.com/go/compute/metadata"
+)
+
const (
	// configEndpointSuffix is the metadata-server path serving the
	// auto-mTLS configuration.
	configEndpointSuffix = "instance/platform-security/auto-mtls-configuration"
)

var (
	// mtlsConfiguration caches the MTLS config fetched from the metadata
	// server. Written at most once (guarded by mtlsOnce); it stays nil when
	// the query fails.
	mtlsConfiguration *mtlsConfig

	// mtlsOnce ensures the metadata server is queried at most once per
	// process.
	mtlsOnce sync.Once
)
+
// GetS2AAddress returns the S2A address to be reached via plaintext connection.
// Returns empty string if not set or invalid.
//
// The first call triggers the (once-only) metadata-server lookup; valid() is
// nil-safe, so a failed lookup simply yields "".
func GetS2AAddress(logger *slog.Logger) string {
	getMetadataMTLSAutoConfig(logger)
	if !mtlsConfiguration.valid() {
		return ""
	}
	return mtlsConfiguration.S2A.PlaintextAddress
}

// GetMTLSS2AAddress returns the S2A address to be reached via MTLS connection.
// Returns empty string if not set or invalid.
//
// Shares the cached configuration (and once-only lookup) with GetS2AAddress.
func GetMTLSS2AAddress(logger *slog.Logger) string {
	getMetadataMTLSAutoConfig(logger)
	if !mtlsConfiguration.valid() {
		return ""
	}
	return mtlsConfiguration.S2A.MTLSAddress
}
+
// mtlsConfig contains the configuration for establishing MTLS connections with Google APIs.
type mtlsConfig struct {
	S2A *s2aAddresses `json:"s2a"`
}

// valid reports whether a configuration was fetched and carries S2A
// addresses. Safe to call on a nil receiver.
func (c *mtlsConfig) valid() bool {
	return c != nil && c.S2A != nil
}

// s2aAddresses contains the plaintext and/or MTLS S2A addresses.
type s2aAddresses struct {
	// PlaintextAddress is the plaintext address to reach S2A.
	PlaintextAddress string `json:"plaintext_address"`
	// MTLSAddress is the MTLS address to reach S2A.
	MTLSAddress string `json:"mtls_address"`
}
+
+func getMetadataMTLSAutoConfig(logger *slog.Logger) {
+ var err error
+ mtlsOnce.Do(func() {
+ mtlsConfiguration, err = queryConfig(logger)
+ if err != nil {
+ log.Printf("Getting MTLS config failed: %v", err)
+ }
+ })
+}
+
// httpGetMetadataMTLSConfig fetches the raw auto-mTLS configuration from the
// metadata server. Declared as a variable — presumably so tests can stub out
// the metadata call; confirm against the package tests.
var httpGetMetadataMTLSConfig = func(logger *slog.Logger) (string, error) {
	metadataClient := metadata.NewWithOptions(&metadata.Options{
		Logger: logger,
	})
	return metadataClient.GetWithContext(context.Background(), configEndpointSuffix)
}
+
+func queryConfig(logger *slog.Logger) (*mtlsConfig, error) {
+ resp, err := httpGetMetadataMTLSConfig(logger)
+ if err != nil {
+ return nil, fmt.Errorf("querying MTLS config from MDS endpoint failed: %w", err)
+ }
+ var config mtlsConfig
+ err = json.Unmarshal([]byte(resp), &config)
+ if err != nil {
+ return nil, fmt.Errorf("unmarshalling MTLS config from MDS endpoint failed: %w", err)
+ }
+ if config.S2A == nil {
+ return nil, fmt.Errorf("returned MTLS config from MDS endpoint is invalid: %v", config)
+ }
+ return &config, nil
+}
+
// shouldUseS2A reports whether the S2A service should be used for mTLS, based
// on the presence of a client cert source, the experimental environment
// opt-in, and the supplied transport options.
func shouldUseS2A(clientCertSource cert.Provider, opts *Options) bool {
	// If client cert is found, use that over S2A.
	if clientCertSource != nil {
		return false
	}
	// If EXPERIMENTAL_GOOGLE_API_USE_S2A is not set to true, skip S2A.
	if !isGoogleS2AEnabled() {
		return false
	}
	// If DefaultMTLSEndpoint is not set or has endpoint override, skip S2A.
	if opts.DefaultMTLSEndpoint == "" || opts.Endpoint != "" {
		return false
	}
	// If custom HTTP client is provided, skip S2A.
	if opts.Client != nil {
		return false
	}
	// If directPath is enabled, skip S2A.
	return !opts.EnableDirectPath && !opts.EnableDirectPathXds
}
+
+func isGoogleS2AEnabled() bool {
+ b, err := strconv.ParseBool(os.Getenv(googleAPIUseS2AEnv))
+ if err != nil {
+ return false
+ }
+ return b
+}
@@ -0,0 +1,106 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package transport provided internal helpers for the two transport packages
+// (grpctransport and httptransport).
+package transport
+
+import (
+ "crypto/tls"
+ "fmt"
+ "net"
+ "net/http"
+ "time"
+
+ "cloud.google.com/go/auth/credentials"
+)
+
// CloneDetectOptions clones a user set detect option into some new memory that
// we can internally manipulate before sending onto the detect package.
func CloneDetectOptions(oldDo *credentials.DetectOptions) *credentials.DetectOptions {
	if oldDo == nil {
		// it is valid for users not to set this, but we will need to default
		// some options for them in this case so return some initialized memory
		// to work with.
		return &credentials.DetectOptions{}
	}
	newDo := &credentials.DetectOptions{
		// Simple types
		Audience:          oldDo.Audience,
		Subject:           oldDo.Subject,
		EarlyTokenRefresh: oldDo.EarlyTokenRefresh,
		TokenURL:          oldDo.TokenURL,
		STSAudience:       oldDo.STSAudience,
		CredentialsFile:   oldDo.CredentialsFile,
		UseSelfSignedJWT:  oldDo.UseSelfSignedJWT,
		UniverseDomain:    oldDo.UniverseDomain,

		// These fields are pointer types that we just want to use exactly
		// as the user set; copy the ref
		Client:             oldDo.Client,
		Logger:             oldDo.Logger,
		AuthHandlerOptions: oldDo.AuthHandlerOptions,
	}

	// Deep-copy the byte and string slices so later mutation of the clone
	// cannot alias the caller's data. Smartly size this memory and copy below.
	if len(oldDo.CredentialsJSON) > 0 {
		newDo.CredentialsJSON = make([]byte, len(oldDo.CredentialsJSON))
		copy(newDo.CredentialsJSON, oldDo.CredentialsJSON)
	}
	if len(oldDo.Scopes) > 0 {
		newDo.Scopes = make([]string, len(oldDo.Scopes))
		copy(newDo.Scopes, oldDo.Scopes)
	}

	return newDo
}
+
// ValidateUniverseDomain verifies that the universe domain configured for the
// client matches the universe domain configured for the credentials. A nil
// return means the two agree.
func ValidateUniverseDomain(clientUniverseDomain, credentialsUniverseDomain string) error {
	if clientUniverseDomain == credentialsUniverseDomain {
		return nil
	}
	return fmt.Errorf(
		"the configured universe domain (%q) does not match the universe "+
			"domain found in the credentials (%q). If you haven't configured "+
			"the universe domain explicitly, \"googleapis.com\" is the default",
		clientUniverseDomain,
		credentialsUniverseDomain)
}
+
+// DefaultHTTPClientWithTLS constructs an HTTPClient using the provided tlsConfig, to support mTLS.
+func DefaultHTTPClientWithTLS(tlsConfig *tls.Config) *http.Client {
+ trans := BaseTransport()
+ trans.TLSClientConfig = tlsConfig
+ return &http.Client{Transport: trans}
+}
+
+// BaseTransport returns a default [http.Transport] which can be used if
+// [http.DefaultTransport] has been overwritten.
+func BaseTransport() *http.Transport {
+ return &http.Transport{
+ Proxy: http.ProxyFromEnvironment,
+ DialContext: (&net.Dialer{
+ Timeout: 30 * time.Second,
+ KeepAlive: 30 * time.Second,
+ DualStack: true,
+ }).DialContext,
+ MaxIdleConns: 100,
+ MaxIdleConnsPerHost: 100,
+ IdleConnTimeout: 90 * time.Second,
+ TLSHandshakeTimeout: 10 * time.Second,
+ ExpectContinueTimeout: 1 * time.Second,
+ }
+}
@@ -0,0 +1,382 @@
+// Copyright 2023 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package auth
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "log/slog"
+ "mime"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "cloud.google.com/go/auth/internal"
+ "github.com/googleapis/gax-go/v2/internallog"
+)
+
// AuthorizationHandler is a 3-legged-OAuth helper that prompts the user for
// OAuth consent at the specified auth code URL and returns an auth code and
// state upon approval.
type AuthorizationHandler func(authCodeURL string) (code string, state string, err error)

// Options3LO are the options for doing a 3-legged OAuth2 flow.
type Options3LO struct {
	// ClientID is the application's ID.
	ClientID string
	// ClientSecret is the application's secret. Not required if AuthHandlerOpts
	// is set.
	ClientSecret string
	// AuthURL is the URL for authenticating.
	AuthURL string
	// TokenURL is the URL for retrieving a token.
	TokenURL string
	// AuthStyle describes how the client info is sent in the token request.
	AuthStyle Style
	// RefreshToken is the token used to refresh the credential. Not required
	// if AuthHandlerOpts is set.
	RefreshToken string
	// RedirectURL is the URL to redirect users to. Optional.
	RedirectURL string
	// Scopes specifies requested permissions for the Token. Optional.
	Scopes []string

	// URLParams are the set of values to apply to the token exchange. Optional.
	URLParams url.Values
	// Client is the client to be used to make the underlying token requests.
	// Optional.
	Client *http.Client
	// EarlyTokenExpiry is the time before the token expires that it should be
	// refreshed. If not set the default value is 3 minutes and 45 seconds.
	// Optional.
	EarlyTokenExpiry time.Duration

	// AuthHandlerOpts provides a set of options for doing a
	// 3-legged OAuth2 flow with a custom [AuthorizationHandler]. Optional.
	AuthHandlerOpts *AuthorizationHandlerOptions
	// Logger is used for debug logging. If provided, logging will be enabled
	// at the loggers configured level. By default logging is disabled unless
	// enabled by setting GOOGLE_SDK_GO_LOGGING_LEVEL in which case a default
	// logger will be used. Optional.
	Logger *slog.Logger
}
+
+func (o *Options3LO) validate() error {
+ if o == nil {
+ return errors.New("auth: options must be provided")
+ }
+ if o.ClientID == "" {
+ return errors.New("auth: client ID must be provided")
+ }
+ if o.AuthHandlerOpts == nil && o.ClientSecret == "" {
+ return errors.New("auth: client secret must be provided")
+ }
+ if o.AuthURL == "" {
+ return errors.New("auth: auth URL must be provided")
+ }
+ if o.TokenURL == "" {
+ return errors.New("auth: token URL must be provided")
+ }
+ if o.AuthStyle == StyleUnknown {
+ return errors.New("auth: auth style must be provided")
+ }
+ if o.AuthHandlerOpts == nil && o.RefreshToken == "" {
+ return errors.New("auth: refresh token must be provided")
+ }
+ return nil
+}
+
// logger returns the configured Logger, or the internallog default when none
// was set.
func (o *Options3LO) logger() *slog.Logger {
	return internallog.New(o.Logger)
}
+
// PKCEOptions holds parameters to support PKCE.
type PKCEOptions struct {
	// Challenge is the un-padded, base64-url-encoded string of the encrypted code verifier.
	Challenge string
	// ChallengeMethod is the encryption method (ex. S256).
	ChallengeMethod string
	// Verifier is the original, non-encrypted secret.
	Verifier string
}

// tokenJSON mirrors the JSON body of a token-endpoint response, including the
// standard OAuth2 error fields.
type tokenJSON struct {
	AccessToken  string `json:"access_token"`
	TokenType    string `json:"token_type"`
	RefreshToken string `json:"refresh_token"`
	ExpiresIn    int    `json:"expires_in"`
	// error fields
	ErrorCode        string `json:"error"`
	ErrorDescription string `json:"error_description"`
	ErrorURI         string `json:"error_uri"`
}

// expiry converts the relative expires_in value into an absolute time.
// A zero ExpiresIn yields the zero time, i.e. no known expiry.
func (e *tokenJSON) expiry() (t time.Time) {
	if v := e.ExpiresIn; v != 0 {
		return time.Now().Add(time.Duration(v) * time.Second)
	}
	return
}

// client returns the HTTP client to use for token requests, falling back to
// the package default when none was configured.
func (o *Options3LO) client() *http.Client {
	if o.Client != nil {
		return o.Client
	}
	return internal.DefaultClient()
}
+
+// authCodeURL returns a URL that points to a OAuth2 consent page.
+func (o *Options3LO) authCodeURL(state string, values url.Values) string {
+ var buf bytes.Buffer
+ buf.WriteString(o.AuthURL)
+ v := url.Values{
+ "response_type": {"code"},
+ "client_id": {o.ClientID},
+ }
+ if o.RedirectURL != "" {
+ v.Set("redirect_uri", o.RedirectURL)
+ }
+ if len(o.Scopes) > 0 {
+ v.Set("scope", strings.Join(o.Scopes, " "))
+ }
+ if state != "" {
+ v.Set("state", state)
+ }
+ if o.AuthHandlerOpts != nil {
+ if o.AuthHandlerOpts.PKCEOpts != nil &&
+ o.AuthHandlerOpts.PKCEOpts.Challenge != "" {
+ v.Set(codeChallengeKey, o.AuthHandlerOpts.PKCEOpts.Challenge)
+ }
+ if o.AuthHandlerOpts.PKCEOpts != nil &&
+ o.AuthHandlerOpts.PKCEOpts.ChallengeMethod != "" {
+ v.Set(codeChallengeMethodKey, o.AuthHandlerOpts.PKCEOpts.ChallengeMethod)
+ }
+ }
+ for k := range values {
+ v.Set(k, v.Get(k))
+ }
+ if strings.Contains(o.AuthURL, "?") {
+ buf.WriteByte('&')
+ } else {
+ buf.WriteByte('?')
+ }
+ buf.WriteString(v.Encode())
+ return buf.String()
+}
+
// New3LOTokenProvider returns a [TokenProvider] based on the 3-legged OAuth2
// configuration. The returned TokenProvider caches and auto-refreshes tokens
// by default. When AuthHandlerOpts is set, tokens are obtained interactively
// through the handler instead of via the refresh-token grant.
func New3LOTokenProvider(opts *Options3LO) (TokenProvider, error) {
	if err := opts.validate(); err != nil {
		return nil, err
	}
	if opts.AuthHandlerOpts != nil {
		return new3LOTokenProviderWithAuthHandler(opts), nil
	}
	return NewCachedTokenProvider(&tokenProvider3LO{opts: opts, refreshToken: opts.RefreshToken, client: opts.client()}, &CachedTokenProviderOptions{
		ExpireEarly: opts.EarlyTokenExpiry,
	}), nil
}
+
// AuthorizationHandlerOptions provides a set of options to specify for doing a
// 3-legged OAuth2 flow with a custom [AuthorizationHandler].
type AuthorizationHandlerOptions struct {
	// Handler specifies the handler used for the authorization
	// part of the flow.
	Handler AuthorizationHandler
	// State is used to verify that the "state" is identical in the request and
	// response before exchanging the auth code for OAuth2 token.
	State string
	// PKCEOpts allows setting configurations for PKCE. Optional.
	PKCEOpts *PKCEOptions
}

// new3LOTokenProviderWithAuthHandler wraps the handler-driven provider in the
// standard caching layer.
func new3LOTokenProviderWithAuthHandler(opts *Options3LO) TokenProvider {
	return NewCachedTokenProvider(&tokenProviderWithHandler{opts: opts, state: opts.AuthHandlerOpts.State}, &CachedTokenProviderOptions{
		ExpireEarly: opts.EarlyTokenExpiry,
	})
}
+
// exchange handles the final exchange portion of the 3lo flow: it trades the
// auth code obtained from the consent step for a token. Returns a Token,
// refreshToken, and error. Caller-supplied URLParams override the defaults
// set here.
func (o *Options3LO) exchange(ctx context.Context, code string) (*Token, string, error) {
	// Build request
	v := url.Values{
		"grant_type": {"authorization_code"},
		"code":       {code},
	}
	if o.RedirectURL != "" {
		v.Set("redirect_uri", o.RedirectURL)
	}
	// Include the PKCE verifier when the flow started with a challenge.
	if o.AuthHandlerOpts != nil &&
		o.AuthHandlerOpts.PKCEOpts != nil &&
		o.AuthHandlerOpts.PKCEOpts.Verifier != "" {
		v.Set(codeVerifierKey, o.AuthHandlerOpts.PKCEOpts.Verifier)
	}
	for k := range o.URLParams {
		v.Set(k, o.URLParams.Get(k))
	}
	return fetchToken(ctx, o, v)
}
+
// tokenProvider3LO obtains tokens via the refresh-token grant.
// This struct is not safe for concurrent access alone, but the way it is used
// in this package by wrapping it with a cachedTokenProvider makes it so.
type tokenProvider3LO struct {
	opts *Options3LO
	// NOTE(review): client appears to be unused — Token goes through
	// fetchToken, which calls opts.client(). Confirm before removing.
	client       *http.Client
	refreshToken string
}

// Token fetches a fresh token with the stored refresh token, adopting any
// rotated refresh token returned by the server for subsequent calls.
func (tp *tokenProvider3LO) Token(ctx context.Context) (*Token, error) {
	if tp.refreshToken == "" {
		return nil, errors.New("auth: token expired and refresh token is not set")
	}
	v := url.Values{
		"grant_type":    {"refresh_token"},
		"refresh_token": {tp.refreshToken},
	}
	for k := range tp.opts.URLParams {
		v.Set(k, tp.opts.URLParams.Get(k))
	}

	tk, rt, err := fetchToken(ctx, tp.opts, v)
	if err != nil {
		return nil, err
	}
	// Servers may rotate the refresh token; keep the newest non-empty one.
	if tp.refreshToken != rt && rt != "" {
		tp.refreshToken = rt
	}
	return tk, err
}
+
+type tokenProviderWithHandler struct {
+ opts *Options3LO
+ state string
+}
+
+func (tp tokenProviderWithHandler) Token(ctx context.Context) (*Token, error) {
+ url := tp.opts.authCodeURL(tp.state, nil)
+ code, state, err := tp.opts.AuthHandlerOpts.Handler(url)
+ if err != nil {
+ return nil, err
+ }
+ if state != tp.state {
+ return nil, errors.New("auth: state mismatch in 3-legged-OAuth flow")
+ }
+ tok, _, err := tp.opts.exchange(ctx, code)
+ return tok, err
+}
+
// fetchToken returns a Token, refresh token, and/or an error. It POSTs v to
// the token endpoint, sending client credentials either in the form body
// (StyleInParams) or as HTTP basic auth (StyleInHeader), and parses the
// response as either a urlencoded form or JSON depending on Content-Type.
func fetchToken(ctx context.Context, o *Options3LO, v url.Values) (*Token, string, error) {
	var refreshToken string
	if o.AuthStyle == StyleInParams {
		if o.ClientID != "" {
			v.Set("client_id", o.ClientID)
		}
		if o.ClientSecret != "" {
			v.Set("client_secret", o.ClientSecret)
		}
	}
	req, err := http.NewRequestWithContext(ctx, "POST", o.TokenURL, strings.NewReader(v.Encode()))
	if err != nil {
		return nil, refreshToken, err
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	if o.AuthStyle == StyleInHeader {
		req.SetBasicAuth(url.QueryEscape(o.ClientID), url.QueryEscape(o.ClientSecret))
	}
	logger := o.logger()

	logger.DebugContext(ctx, "3LO token request", "request", internallog.HTTPRequest(req, []byte(v.Encode())))
	// Make request
	resp, body, err := internal.DoRequest(o.client(), req)
	if err != nil {
		return nil, refreshToken, err
	}
	logger.DebugContext(ctx, "3LO token response", "response", internallog.HTTPResponse(resp, body))
	failureStatus := resp.StatusCode < 200 || resp.StatusCode > 299
	// tokError is returned whenever the response indicates failure; its
	// code/description/uri fields are filled in below from the parsed body.
	tokError := &Error{
		Response: resp,
		Body:     body,
	}

	var token *Token
	// errors ignored because of default switch on content
	content, _, _ := mime.ParseMediaType(resp.Header.Get("Content-Type"))
	switch content {
	case "application/x-www-form-urlencoded", "text/plain":
		// some endpoints return a query string
		vals, err := url.ParseQuery(string(body))
		if err != nil {
			if failureStatus {
				return nil, refreshToken, tokError
			}
			return nil, refreshToken, fmt.Errorf("auth: cannot parse response: %w", err)
		}
		tokError.code = vals.Get("error")
		tokError.description = vals.Get("error_description")
		tokError.uri = vals.Get("error_uri")
		token = &Token{
			Value:    vals.Get("access_token"),
			Type:     vals.Get("token_type"),
			Metadata: make(map[string]interface{}, len(vals)),
		}
		// Preserve every response field for callers that need extras.
		for k, v := range vals {
			token.Metadata[k] = v
		}
		refreshToken = vals.Get("refresh_token")
		e := vals.Get("expires_in")
		expires, _ := strconv.Atoi(e)
		if expires != 0 {
			token.Expiry = time.Now().Add(time.Duration(expires) * time.Second)
		}
	default:
		var tj tokenJSON
		if err = json.Unmarshal(body, &tj); err != nil {
			if failureStatus {
				return nil, refreshToken, tokError
			}
			return nil, refreshToken, fmt.Errorf("auth: cannot parse json: %w", err)
		}
		tokError.code = tj.ErrorCode
		tokError.description = tj.ErrorDescription
		tokError.uri = tj.ErrorURI
		token = &Token{
			Value:    tj.AccessToken,
			Type:     tj.TokenType,
			Expiry:   tj.expiry(),
			Metadata: make(map[string]interface{}),
		}
		json.Unmarshal(body, &token.Metadata) // optional field, skip err check
		refreshToken = tj.RefreshToken
	}
	// according to spec, servers should respond status 400 in error case
	// https://www.rfc-editor.org/rfc/rfc6749#section-5.2
	// but some unorthodox servers respond 200 in error case
	if failureStatus || tokError.code != "" {
		return nil, refreshToken, tokError
	}
	if token.Value == "" {
		return nil, refreshToken, errors.New("auth: server response missing access_token")
	}
	return token, refreshToken, nil
}
+// Copyright 2016 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package civil implements types for civil time, a time-zone-independent
+// representation of time that follows the rules of the proleptic
+// Gregorian calendar with exactly 24-hour days, 60-minute hours, and 60-second
+// minutes.
+//
+// Because they lack location information, these types do not represent unique
+// moments or intervals of time. Use time.Time for that purpose.
+package civil
+
+import (
+ "fmt"
+ "time"
+)
+
// A Date represents a date (year, month, day).
//
// This type does not include location information, and therefore does not
// describe a unique 24-hour timespan.
type Date struct {
	Year  int        // Year (e.g., 2014).
	Month time.Month // Month of the year (January = 1, ...).
	Day   int        // Day of the month, starting at 1.
}

// DateOf returns the Date in which a time occurs in that time's location.
func DateOf(t time.Time) Date {
	year, month, day := t.Date()
	return Date{Year: year, Month: month, Day: day}
}

// ParseDate parses a string in RFC3339 full-date format and returns the date value it represents.
func ParseDate(s string) (Date, error) {
	t, err := time.Parse("2006-01-02", s)
	if err != nil {
		return Date{}, err
	}
	return DateOf(t), nil
}

// String returns the date in RFC3339 full-date format.
func (d Date) String() string {
	return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day)
}

// IsValid reports whether the date is valid.
func (d Date) IsValid() bool {
	// time.Date normalizes out-of-range fields, so a date is valid exactly
	// when round-tripping through it leaves the date unchanged.
	return d == DateOf(d.In(time.UTC))
}

// In returns the time corresponding to time 00:00:00 of the date in the location.
//
// In is always consistent with time.Date, even when time.Date returns a time
// on a different day. For example, if loc is America/Indiana/Vincennes, then both
//
//	time.Date(1955, time.May, 1, 0, 0, 0, 0, loc)
//
// and
//
//	civil.Date{Year: 1955, Month: time.May, Day: 1}.In(loc)
//
// return 23:00:00 on April 30, 1955.
//
// In panics if loc is nil.
func (d Date) In(loc *time.Location) time.Time {
	return time.Date(d.Year, d.Month, d.Day, 0, 0, 0, 0, loc)
}

// AddDays returns the date that is n days in the future.
// n can also be negative to go into the past.
func (d Date) AddDays(n int) Date {
	return DateOf(d.In(time.UTC).AddDate(0, 0, n))
}

// DaysSince returns the signed number of days between the date and s, not including the end day.
// This is the inverse operation to AddDays.
func (d Date) DaysSince(s Date) (days int) {
	// Unix time advances exactly 86400 seconds per day, so leap seconds
	// cannot skew the count.
	return int((d.In(time.UTC).Unix() - s.In(time.UTC).Unix()) / 86400)
}

// Before reports whether d occurs before d2.
func (d Date) Before(d2 Date) bool {
	switch {
	case d.Year != d2.Year:
		return d.Year < d2.Year
	case d.Month != d2.Month:
		return d.Month < d2.Month
	default:
		return d.Day < d2.Day
	}
}

// After reports whether d occurs after d2.
func (d Date) After(d2 Date) bool {
	return d2.Before(d)
}

// Compare compares d and d2. If d is before d2, it returns -1;
// if d is after d2, it returns +1; otherwise it returns 0.
func (d Date) Compare(d2 Date) int {
	switch {
	case d.Before(d2):
		return -1
	case d.After(d2):
		return +1
	}
	return 0
}

// IsZero reports whether date fields are set to their default value.
func (d Date) IsZero() bool {
	return d == (Date{})
}

// MarshalText implements the encoding.TextMarshaler interface.
// The output is the result of d.String().
func (d Date) MarshalText() ([]byte, error) {
	return []byte(d.String()), nil
}

// UnmarshalText implements the encoding.TextUnmarshaler interface.
// The date is expected to be a string in a format accepted by ParseDate.
func (d *Date) UnmarshalText(data []byte) error {
	var err error
	*d, err = ParseDate(string(data))
	return err
}
+
// A Time represents a time with nanosecond precision.
//
// This type does not include location information, and therefore does not
// describe a unique moment in time.
//
// This type exists to represent the TIME type in storage-based APIs like BigQuery.
// Most operations on Times are unlikely to be meaningful. Prefer the DateTime type.
type Time struct {
	Hour       int // The hour of the day in 24-hour format; range [0-23]
	Minute     int // The minute of the hour; range [0-59]
	Second     int // The second of the minute; range [0-59]
	Nanosecond int // The nanosecond of the second; range [0-999999999]
}

// TimeOf returns the Time representing the time of day in which a time occurs
// in that time's location. It ignores the date.
func TimeOf(t time.Time) Time {
	hour, min, sec := t.Clock()
	return Time{Hour: hour, Minute: min, Second: sec, Nanosecond: t.Nanosecond()}
}

// ParseTime parses a string and returns the time value it represents.
// ParseTime accepts an extended form of the RFC3339 partial-time format. After
// the HH:MM:SS part of the string, an optional fractional part may appear,
// consisting of a decimal point followed by one to nine decimal digits.
// (RFC3339 admits only one digit after the decimal point).
func ParseTime(s string) (Time, error) {
	t, err := time.Parse("15:04:05.999999999", s)
	if err != nil {
		return Time{}, err
	}
	return TimeOf(t), nil
}

// String returns the date in the format described in ParseTime. If Nanoseconds
// is zero, no fractional part will be generated. Otherwise, the result will
// end with a fractional part consisting of a decimal point and nine digits.
func (t Time) String() string {
	if t.Nanosecond == 0 {
		return fmt.Sprintf("%02d:%02d:%02d", t.Hour, t.Minute, t.Second)
	}
	return fmt.Sprintf("%02d:%02d:%02d.%09d", t.Hour, t.Minute, t.Second, t.Nanosecond)
}

// IsValid reports whether the time is valid.
func (t Time) IsValid() bool {
	// time.Date normalizes out-of-range fields, so the time is valid exactly
	// when round-tripping through a (non-zero) date leaves it unchanged.
	norm := time.Date(2, 2, 2, t.Hour, t.Minute, t.Second, t.Nanosecond, time.UTC)
	return t == TimeOf(norm)
}

// IsZero reports whether time fields are set to their default value.
func (t Time) IsZero() bool {
	return t == (Time{})
}

// Before reports whether t occurs before t2.
func (t Time) Before(t2 Time) bool {
	switch {
	case t.Hour != t2.Hour:
		return t.Hour < t2.Hour
	case t.Minute != t2.Minute:
		return t.Minute < t2.Minute
	case t.Second != t2.Second:
		return t.Second < t2.Second
	default:
		return t.Nanosecond < t2.Nanosecond
	}
}

// After reports whether t occurs after t2.
func (t Time) After(t2 Time) bool {
	return t2.Before(t)
}

// Compare compares t and t2. If t is before t2, it returns -1;
// if t is after t2, it returns +1; otherwise it returns 0.
func (t Time) Compare(t2 Time) int {
	switch {
	case t.Before(t2):
		return -1
	case t.After(t2):
		return +1
	}
	return 0
}

// MarshalText implements the encoding.TextMarshaler interface.
// The output is the result of t.String().
func (t Time) MarshalText() ([]byte, error) {
	return []byte(t.String()), nil
}

// UnmarshalText implements the encoding.TextUnmarshaler interface.
// The time is expected to be a string in a format accepted by ParseTime.
func (t *Time) UnmarshalText(data []byte) error {
	var err error
	*t, err = ParseTime(string(data))
	return err
}
+
+// A DateTime represents a date and time.
+//
+// This type does not include location information, and therefore does not
+// describe a unique moment in time.
+type DateTime struct {
+ Date Date
+ Time Time
+}
+
+// Note: We deliberately do not embed Date into DateTime, to avoid promoting AddDays and Sub.
+
+// DateTimeOf returns the DateTime in which a time occurs in that time's location.
+func DateTimeOf(t time.Time) DateTime {
+ return DateTime{
+ Date: DateOf(t),
+ Time: TimeOf(t),
+ }
+}
+
+// ParseDateTime parses a string and returns the DateTime it represents.
+// ParseDateTime accepts a variant of the RFC3339 date-time format that omits
+// the time offset but includes an optional fractional time, as described in
+// ParseTime. Informally, the accepted format is
+//
+// YYYY-MM-DDTHH:MM:SS[.FFFFFFFFF]
+//
+// where the 'T' may be a lower-case 't'.
+func ParseDateTime(s string) (DateTime, error) {
+ t, err := time.Parse("2006-01-02T15:04:05.999999999", s)
+ if err != nil {
+ t, err = time.Parse("2006-01-02t15:04:05.999999999", s)
+ if err != nil {
+ return DateTime{}, err
+ }
+ }
+ return DateTimeOf(t), nil
+}
+
+// String returns the date in the format described in ParseDate.
+func (dt DateTime) String() string {
+ return dt.Date.String() + "T" + dt.Time.String()
+}
+
+// IsValid reports whether the datetime is valid.
+func (dt DateTime) IsValid() bool {
+ return dt.Date.IsValid() && dt.Time.IsValid()
+}
+
+// In returns the time corresponding to the DateTime in the given location.
+//
+// If the time is missing or ambigous at the location, In returns the same
+// result as time.Date. For example, if loc is America/Indiana/Vincennes, then
+// both
+//
+// time.Date(1955, time.May, 1, 0, 30, 0, 0, loc)
+//
+// and
+//
+// civil.DateTime{
+// civil.Date{Year: 1955, Month: time.May, Day: 1}},
+// civil.Time{Minute: 30}}.In(loc)
+//
+// return 23:30:00 on April 30, 1955.
+//
+// In panics if loc is nil.
+func (dt DateTime) In(loc *time.Location) time.Time {
+ return time.Date(dt.Date.Year, dt.Date.Month, dt.Date.Day, dt.Time.Hour, dt.Time.Minute, dt.Time.Second, dt.Time.Nanosecond, loc)
+}
+
+// Before reports whether dt occurs before dt2.
+func (dt DateTime) Before(dt2 DateTime) bool {
+ return dt.In(time.UTC).Before(dt2.In(time.UTC))
+}
+
+// After reports whether dt occurs after dt2.
+func (dt DateTime) After(dt2 DateTime) bool {
+ return dt2.Before(dt)
+}
+
+// Compare compares dt and dt2. If dt is before dt2, it returns -1;
+// if dt is after dt2, it returns +1; otherwise it returns 0.
+func (dt DateTime) Compare(dt2 DateTime) int {
+ return dt.In(time.UTC).Compare(dt2.In(time.UTC))
+}
+
+// IsZero reports whether datetime fields are set to their default value.
+func (dt DateTime) IsZero() bool {
+ return dt.Date.IsZero() && dt.Time.IsZero()
+}
+
+// MarshalText implements the encoding.TextMarshaler interface.
+// The output is the result of dt.String().
+func (dt DateTime) MarshalText() ([]byte, error) {
+ return []byte(dt.String()), nil
+}
+
+// UnmarshalText implements the encoding.TextUnmarshaler interface.
+// The datetime is expected to be a string in a format accepted by ParseDateTime
+func (dt *DateTime) UnmarshalText(data []byte) error {
+ var err error
+ *dt, err = ParseDateTime(string(data))
+ return err
+}
@@ -0,0 +1,66 @@
+# Changes
+
+## [0.6.0](https://github.com/googleapis/google-cloud-go/compare/compute/metadata/v0.5.2...compute/metadata/v0.6.0) (2024-12-13)
+
+
+### Features
+
+* **compute/metadata:** Add debug logging ([#11078](https://github.com/googleapis/google-cloud-go/issues/11078)) ([a816814](https://github.com/googleapis/google-cloud-go/commit/a81681463906e4473570a2f426eb0dc2de64e53f))
+
+## [0.5.2](https://github.com/googleapis/google-cloud-go/compare/compute/metadata/v0.5.1...compute/metadata/v0.5.2) (2024-09-20)
+
+
+### Bug Fixes
+
+* **compute/metadata:** Close Response Body for failed request ([#10891](https://github.com/googleapis/google-cloud-go/issues/10891)) ([e91d45e](https://github.com/googleapis/google-cloud-go/commit/e91d45e4757a9e354114509ba9800085d9e0ff1f))
+
+## [0.5.1](https://github.com/googleapis/google-cloud-go/compare/compute/metadata/v0.5.0...compute/metadata/v0.5.1) (2024-09-12)
+
+
+### Bug Fixes
+
+* **compute/metadata:** Check error chain for retryable error ([#10840](https://github.com/googleapis/google-cloud-go/issues/10840)) ([2bdedef](https://github.com/googleapis/google-cloud-go/commit/2bdedeff621b223d63cebc4355fcf83bc68412cd))
+
+## [0.5.0](https://github.com/googleapis/google-cloud-go/compare/compute/metadata/v0.4.0...compute/metadata/v0.5.0) (2024-07-10)
+
+
+### Features
+
+* **compute/metadata:** Add sys check for windows OnGCE ([#10521](https://github.com/googleapis/google-cloud-go/issues/10521)) ([3b9a830](https://github.com/googleapis/google-cloud-go/commit/3b9a83063960d2a2ac20beb47cc15818a68bd302))
+
+## [0.4.0](https://github.com/googleapis/google-cloud-go/compare/compute/metadata/v0.3.0...compute/metadata/v0.4.0) (2024-07-01)
+
+
+### Features
+
+* **compute/metadata:** Add context for all functions/methods ([#10370](https://github.com/googleapis/google-cloud-go/issues/10370)) ([66b8efe](https://github.com/googleapis/google-cloud-go/commit/66b8efe7ad877e052b2987bb4475477e38c67bb3))
+
+
+### Documentation
+
+* **compute/metadata:** Update OnGCE description ([#10408](https://github.com/googleapis/google-cloud-go/issues/10408)) ([6a46dca](https://github.com/googleapis/google-cloud-go/commit/6a46dca4eae4f88ec6f88822e01e5bf8aeca787f))
+
+## [0.3.0](https://github.com/googleapis/google-cloud-go/compare/compute/metadata/v0.2.3...compute/metadata/v0.3.0) (2024-04-15)
+
+
+### Features
+
+* **compute/metadata:** Add context aware functions ([#9733](https://github.com/googleapis/google-cloud-go/issues/9733)) ([e4eb5b4](https://github.com/googleapis/google-cloud-go/commit/e4eb5b46ee2aec9d2fc18300bfd66015e25a0510))
+
+## [0.2.3](https://github.com/googleapis/google-cloud-go/compare/compute/metadata/v0.2.2...compute/metadata/v0.2.3) (2022-12-15)
+
+
+### Bug Fixes
+
+* **compute/metadata:** Switch DNS lookup to an absolute lookup ([119b410](https://github.com/googleapis/google-cloud-go/commit/119b41060c7895e45e48aee5621ad35607c4d021)), refs [#7165](https://github.com/googleapis/google-cloud-go/issues/7165)
+
+## [0.2.2](https://github.com/googleapis/google-cloud-go/compare/compute/metadata/v0.2.1...compute/metadata/v0.2.2) (2022-12-01)
+
+
+### Bug Fixes
+
+* **compute/metadata:** Set IdleConnTimeout for http.Client ([#7084](https://github.com/googleapis/google-cloud-go/issues/7084)) ([766516a](https://github.com/googleapis/google-cloud-go/commit/766516aaf3816bfb3159efeea65aa3d1d205a3e2)), refs [#5430](https://github.com/googleapis/google-cloud-go/issues/5430)
+
+## [0.1.0] (2022-10-26)
+
+Initial release of metadata as its own module.
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
@@ -0,0 +1,27 @@
+# Compute API
+
+[](https://pkg.go.dev/cloud.google.com/go/compute/metadata)
+
+This is a utility library for communicating with the Google Cloud metadata
+service on Google Cloud.
+
+## Install
+
+```bash
+go get cloud.google.com/go/compute/metadata
+```
+
+## Go Version Support
+
+See the [Go Versions Supported](https://github.com/googleapis/google-cloud-go#go-versions-supported)
+section in the root directory's README.
+
+## Contributing
+
+Contributions are welcome. Please, see the [CONTRIBUTING](https://github.com/GoogleCloudPlatform/google-cloud-go/blob/main/CONTRIBUTING.md)
+document for details.
+
+Please note that this project is released with a Contributor Code of Conduct.
+By participating in this project you agree to abide by its terms. See
+[Contributor Code of Conduct](https://github.com/GoogleCloudPlatform/google-cloud-go/blob/main/CONTRIBUTING.md#contributor-code-of-conduct)
+for more information.
@@ -0,0 +1,149 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metadata
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "fmt"
+ "log/slog"
+ "net/http"
+ "strings"
+)
+
+// Code below this point is copied from github.com/googleapis/gax-go/v2/internallog
+// to avoid the dependency. The compute/metadata module is used by too many
+// non-client library modules that can't justify the dependency.
+
+// The handler returned if logging is not enabled.
+type noOpHandler struct{}
+
+// Enabled always reports false, disabling every log level.
+func (h noOpHandler) Enabled(_ context.Context, _ slog.Level) bool {
+	return false
+}
+
+// Handle discards the record.
+func (h noOpHandler) Handle(_ context.Context, _ slog.Record) error {
+	return nil
+}
+
+// WithAttrs returns the handler unchanged; there is nothing to attach to.
+func (h noOpHandler) WithAttrs(_ []slog.Attr) slog.Handler {
+	return h
+}
+
+// WithGroup returns the handler unchanged.
+func (h noOpHandler) WithGroup(_ string) slog.Handler {
+	return h
+}
+
+// httpRequest returns a lazily evaluated [slog.LogValuer] for a
+// [http.Request] and the associated body.
+func httpRequest(req *http.Request, body []byte) slog.LogValuer {
+	return &request{
+		req:     req,
+		payload: body,
+	}
+}
+
+// request pairs an HTTP request with its body bytes so both can be
+// logged together, and only when a log record is actually emitted.
+type request struct {
+	req     *http.Request
+	payload []byte
+}
+
+// LogValue implements [slog.LogValuer], rendering the request method,
+// URL, headers, and (when non-empty) the payload as one group value.
+// Keeping this behind a LogValuer means the formatting work only
+// happens if a handler actually emits the record.
+func (r *request) LogValue() slog.Value {
+	if r == nil || r.req == nil {
+		return slog.Value{}
+	}
+	var groupValueAttrs []slog.Attr
+	groupValueAttrs = append(groupValueAttrs, slog.String("method", r.req.Method))
+	groupValueAttrs = append(groupValueAttrs, slog.String("url", r.req.URL.String()))
+
+	// Multi-valued headers are flattened into one comma-joined string each.
+	var headerAttr []slog.Attr
+	for k, val := range r.req.Header {
+		headerAttr = append(headerAttr, slog.String(k, strings.Join(val, ",")))
+	}
+	if len(headerAttr) > 0 {
+		groupValueAttrs = append(groupValueAttrs, slog.Any("headers", headerAttr))
+	}
+
+	if len(r.payload) > 0 {
+		if attr, ok := processPayload(r.payload); ok {
+			groupValueAttrs = append(groupValueAttrs, attr)
+		}
+	}
+	return slog.GroupValue(groupValueAttrs...)
+}
+
+// httpResponse returns a lazily evaluated [slog.LogValuer] for a
+// [http.Response] and the associated body.
+func httpResponse(resp *http.Response, body []byte) slog.LogValuer {
+	return &response{
+		resp:    resp,
+		payload: body,
+	}
+}
+
+// response pairs an HTTP response with its body bytes so both can be
+// logged together, and only when a log record is actually emitted.
+type response struct {
+	resp    *http.Response
+	payload []byte
+}
+
+// LogValue implements [slog.LogValuer], rendering the response status,
+// headers, and (when non-empty) the payload as one group value.
+func (r *response) LogValue() slog.Value {
+	// Also guard r.resp, mirroring (*request).LogValue: a nil embedded
+	// response would otherwise panic on the StatusCode access below.
+	if r == nil || r.resp == nil {
+		return slog.Value{}
+	}
+	var groupValueAttrs []slog.Attr
+	groupValueAttrs = append(groupValueAttrs, slog.String("status", fmt.Sprint(r.resp.StatusCode)))
+
+	// Multi-valued headers are flattened into one comma-joined string each.
+	var headerAttr []slog.Attr
+	for k, val := range r.resp.Header {
+		headerAttr = append(headerAttr, slog.String(k, strings.Join(val, ",")))
+	}
+	if len(headerAttr) > 0 {
+		groupValueAttrs = append(groupValueAttrs, slog.Any("headers", headerAttr))
+	}
+
+	if len(r.payload) > 0 {
+		if attr, ok := processPayload(r.payload); ok {
+			groupValueAttrs = append(groupValueAttrs, attr)
+		}
+	}
+	return slog.GroupValue(groupValueAttrs...)
+}
+
+// processPayload converts an HTTP body into a loggable attribute. JSON
+// objects and arrays are decoded so they render structurally; any other
+// non-empty payload is logged as a (compacted, when possible) string.
+// The boolean result reports whether an attribute was produced.
+func processPayload(payload []byte) (slog.Attr, bool) {
+	// Callers currently guard with len > 0, but check here too so the
+	// peek at payload[0] can never panic on an empty slice.
+	if len(payload) == 0 {
+		return slog.Attr{}, false
+	}
+	switch payload[0] {
+	case '{':
+		// JSON object
+		var m map[string]any
+		if err := json.Unmarshal(payload, &m); err == nil {
+			return slog.Any("payload", m), true
+		}
+	case '[':
+		// JSON array
+		var m []any
+		if err := json.Unmarshal(payload, &m); err == nil {
+			return slog.Any("payload", m), true
+		}
+	default:
+		// Everything else
+		buf := &bytes.Buffer{}
+		if err := json.Compact(buf, payload); err != nil {
+			// Write raw payload in case of error
+			buf.Write(payload)
+		}
+		return slog.String("payload", buf.String()), true
+	}
+	return slog.Attr{}, false
+}
@@ -0,0 +1,872 @@
+// Copyright 2014 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Package metadata provides access to Google Compute Engine (GCE)
+// metadata and API service accounts.
+//
+// This package is a wrapper around the GCE metadata service,
+// as documented at https://cloud.google.com/compute/docs/metadata/overview.
+package metadata // import "cloud.google.com/go/compute/metadata"
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "log/slog"
+ "net"
+ "net/http"
+ "net/url"
+ "os"
+ "strings"
+ "sync"
+ "time"
+)
+
+const (
+	// metadataIP is the documented metadata server IP address.
+	metadataIP = "169.254.169.254"
+
+	// metadataHostEnv is the environment variable specifying the
+	// GCE metadata hostname. If empty, the default value of
+	// metadataIP ("169.254.169.254") is used instead.
+	// This variable name is not defined by any spec, as far as
+	// I know; it was made up for the Go package.
+	metadataHostEnv = "GCE_METADATA_HOST"
+
+	// userAgent identifies this library in requests to the metadata server.
+	userAgent = "gcloud-golang/0.1"
+)
+
+// cachedValue lazily fetches and memoizes a single metadata value.
+type cachedValue struct {
+	k    string     // metadata key suffix, e.g. "project/project-id"
+	trim bool       // whether to trim whitespace from the fetched value
+	mu   sync.Mutex // guards v
+	v    string     // cached value; empty until the first successful fetch
+}
+
+// Process-wide cached metadata values.
+var (
+	projID  = &cachedValue{k: "project/project-id", trim: true}
+	projNum = &cachedValue{k: "project/numeric-project-id", trim: true}
+	instID  = &cachedValue{k: "instance/id", trim: true}
+)
+
+// defaultClient backs the package-level convenience functions; its
+// noOpHandler logger means those functions never log.
+var defaultClient = &Client{
+	hc:     newDefaultHTTPClient(),
+	logger: slog.New(noOpHandler{}),
+}
+
+// newDefaultHTTPClient returns an http.Client tuned for the metadata
+// server: a 2s dial timeout, 30s keep-alives, a 60s idle-connection
+// timeout, and a 5s overall request timeout.
+func newDefaultHTTPClient() *http.Client {
+	return &http.Client{
+		Transport: &http.Transport{
+			Dial: (&net.Dialer{
+				Timeout:   2 * time.Second,
+				KeepAlive: 30 * time.Second,
+			}).Dial,
+			IdleConnTimeout: 60 * time.Second,
+		},
+		Timeout: 5 * time.Second,
+	}
+}
+
+// NotDefinedError is returned when requested metadata is not defined.
+//
+// The underlying string is the suffix after "/computeMetadata/v1/".
+//
+// This error is not returned if the value is defined to be the empty
+// string.
+type NotDefinedError string
+
+// Error implements the error interface.
+func (suffix NotDefinedError) Error() string {
+	return fmt.Sprintf("metadata: GCE metadata %q not defined", string(suffix))
+}
+
+// get returns the cached value, fetching and storing it on first use.
+// Errors are not cached: a failed fetch is retried on the next call.
+func (c *cachedValue) get(ctx context.Context, cl *Client) (v string, err error) {
+	// Lock before registering the deferred Unlock — the conventional
+	// ordering, behaviorally identical to the original defer-then-Lock.
+	c.mu.Lock()
+	defer c.mu.Unlock()
+	if c.v != "" {
+		return c.v, nil
+	}
+	if c.trim {
+		v, err = cl.getTrimmed(ctx, c.k)
+	} else {
+		v, err = cl.GetWithContext(ctx, c.k)
+	}
+	if err == nil {
+		c.v = v
+	}
+	return
+}
+
+var (
+	onGCEOnce sync.Once // ensures the GCE probe runs at most once
+	onGCE     bool      // memoized probe result; read via OnGCE
+)
+
+// OnGCE reports whether this process is running on Google Compute Platforms.
+// NOTE: A true return from OnGCE does not guarantee that the metadata server
+// is accessible from this process or that all metadata is defined.
+func OnGCE() bool {
+	onGCEOnce.Do(initOnGCE)
+	return onGCE
+}
+
+// initOnGCE runs the detection probe; invoked exactly once via onGCEOnce.
+func initOnGCE() {
+	onGCE = testOnGCE()
+}
+
+// testOnGCE performs the actual detection: an HTTP probe of the metadata
+// IP and a DNS probe of metadata.google.internal run in parallel, with
+// systemInfoSuggestsGCE deciding how patiently to wait for agreement.
+func testOnGCE() bool {
+	// The user explicitly said they're on GCE, so trust them.
+	if os.Getenv(metadataHostEnv) != "" {
+		return true
+	}
+
+	ctx, cancel := context.WithCancel(context.Background())
+	defer cancel()
+
+	// Buffered to 2 so both probe goroutines can send their result and
+	// exit even if the result is never received (no goroutine leak).
+	resc := make(chan bool, 2)
+
+	// Try two strategies in parallel.
+	// See https://github.com/googleapis/google-cloud-go/issues/194
+	go func() {
+		req, _ := http.NewRequest("GET", "http://"+metadataIP, nil)
+		req.Header.Set("User-Agent", userAgent)
+		res, err := newDefaultHTTPClient().Do(req.WithContext(ctx))
+		if err != nil {
+			resc <- false
+			return
+		}
+		defer res.Body.Close()
+		resc <- res.Header.Get("Metadata-Flavor") == "Google"
+	}()
+
+	go func() {
+		resolver := &net.Resolver{}
+		addrs, err := resolver.LookupHost(ctx, "metadata.google.internal.")
+		if err != nil || len(addrs) == 0 {
+			resc <- false
+			return
+		}
+		resc <- strsContains(addrs, metadataIP)
+	}()
+
+	tryHarder := systemInfoSuggestsGCE()
+	if tryHarder {
+		res := <-resc
+		if res {
+			// The first strategy succeeded, so let's use it.
+			return true
+		}
+		// Wait for either the DNS or metadata server probe to
+		// contradict the other one and say we are running on
+		// GCE. Give it a lot of time to do so, since the system
+		// info already suggests we're running on a GCE BIOS.
+		timer := time.NewTimer(5 * time.Second)
+		defer timer.Stop()
+		select {
+		case res = <-resc:
+			return res
+		case <-timer.C:
+			// Too slow. Who knows what this system is.
+			return false
+		}
+	}
+
+	// There's no hint from the system info that we're running on
+	// GCE, so use the first probe's result as truth, whether it's
+	// true or false. The goal here is to optimize for speed for
+	// users who are NOT running on GCE. We can't assume that
+	// either a DNS lookup or an HTTP request to a blackholed IP
+	// address is fast. Worst case this should return when the
+	// metaClient's Transport.ResponseHeaderTimeout or
+	// Transport.Dial.Timeout fires (in two seconds).
+	return <-resc
+}
+
+// Subscribe calls Client.SubscribeWithContext on the default client.
+//
+// Deprecated: Please use the context aware variant [SubscribeWithContext].
+func Subscribe(suffix string, fn func(v string, ok bool) error) error {
+	// Adapt the legacy callback (no ctx parameter) to the context-aware signature.
+	return defaultClient.SubscribeWithContext(context.Background(), suffix, func(ctx context.Context, v string, ok bool) error { return fn(v, ok) })
+}
+
+// SubscribeWithContext calls Client.SubscribeWithContext on the default client.
+func SubscribeWithContext(ctx context.Context, suffix string, fn func(ctx context.Context, v string, ok bool) error) error {
+	return defaultClient.SubscribeWithContext(ctx, suffix, fn)
+}
+
+// Get calls Client.GetWithContext on the default client.
+//
+// Deprecated: Please use the context aware variant [GetWithContext].
+func Get(suffix string) (string, error) {
+	return defaultClient.GetWithContext(context.Background(), suffix)
+}
+
+// GetWithContext calls Client.GetWithContext on the default client.
+func GetWithContext(ctx context.Context, suffix string) (string, error) {
+	return defaultClient.GetWithContext(ctx, suffix)
+}
+
+// Package-level convenience wrappers: each delegates to defaultClient.
+
+// ProjectID returns the current instance's project ID string.
+//
+// Deprecated: Please use the context aware variant [ProjectIDWithContext].
+func ProjectID() (string, error) {
+	return defaultClient.ProjectIDWithContext(context.Background())
+}
+
+// ProjectIDWithContext returns the current instance's project ID string.
+func ProjectIDWithContext(ctx context.Context) (string, error) {
+	return defaultClient.ProjectIDWithContext(ctx)
+}
+
+// NumericProjectID returns the current instance's numeric project ID.
+//
+// Deprecated: Please use the context aware variant [NumericProjectIDWithContext].
+func NumericProjectID() (string, error) {
+	return defaultClient.NumericProjectIDWithContext(context.Background())
+}
+
+// NumericProjectIDWithContext returns the current instance's numeric project ID.
+func NumericProjectIDWithContext(ctx context.Context) (string, error) {
+	return defaultClient.NumericProjectIDWithContext(ctx)
+}
+
+// InternalIP returns the instance's primary internal IP address.
+//
+// Deprecated: Please use the context aware variant [InternalIPWithContext].
+func InternalIP() (string, error) {
+	return defaultClient.InternalIPWithContext(context.Background())
+}
+
+// InternalIPWithContext returns the instance's primary internal IP address.
+func InternalIPWithContext(ctx context.Context) (string, error) {
+	return defaultClient.InternalIPWithContext(ctx)
+}
+
+// ExternalIP returns the instance's primary external (public) IP address.
+//
+// Deprecated: Please use the context aware variant [ExternalIPWithContext].
+func ExternalIP() (string, error) {
+	return defaultClient.ExternalIPWithContext(context.Background())
+}
+
+// ExternalIPWithContext returns the instance's primary external (public) IP address.
+func ExternalIPWithContext(ctx context.Context) (string, error) {
+	return defaultClient.ExternalIPWithContext(ctx)
+}
+
+// Email calls Client.EmailWithContext on the default client.
+//
+// Deprecated: Please use the context aware variant [EmailWithContext].
+func Email(serviceAccount string) (string, error) {
+	// Uses context.Background; see EmailWithContext to control cancellation.
+	return defaultClient.EmailWithContext(context.Background(), serviceAccount)
+}
+
+// EmailWithContext calls Client.EmailWithContext on the default client.
+func EmailWithContext(ctx context.Context, serviceAccount string) (string, error) {
+	return defaultClient.EmailWithContext(ctx, serviceAccount)
+}
+
+// Hostname returns the instance's hostname. This will be of the form
+// "<instanceID>.c.<projID>.internal".
+//
+// Deprecated: Please use the context aware variant [HostnameWithContext].
+func Hostname() (string, error) {
+	return defaultClient.HostnameWithContext(context.Background())
+}
+
+// HostnameWithContext returns the instance's hostname. This will be of the form
+// "<instanceID>.c.<projID>.internal".
+func HostnameWithContext(ctx context.Context) (string, error) {
+	return defaultClient.HostnameWithContext(ctx)
+}
+
+// InstanceTags returns the list of user-defined instance tags,
+// assigned when initially creating a GCE instance.
+//
+// Deprecated: Please use the context aware variant [InstanceTagsWithContext].
+func InstanceTags() ([]string, error) {
+	return defaultClient.InstanceTagsWithContext(context.Background())
+}
+
+// InstanceTagsWithContext returns the list of user-defined instance tags,
+// assigned when initially creating a GCE instance.
+func InstanceTagsWithContext(ctx context.Context) ([]string, error) {
+	return defaultClient.InstanceTagsWithContext(ctx)
+}
+
+// InstanceID returns the current VM's numeric instance ID.
+//
+// Deprecated: Please use the context aware variant [InstanceIDWithContext].
+func InstanceID() (string, error) {
+	return defaultClient.InstanceIDWithContext(context.Background())
+}
+
+// InstanceIDWithContext returns the current VM's numeric instance ID.
+func InstanceIDWithContext(ctx context.Context) (string, error) {
+	return defaultClient.InstanceIDWithContext(ctx)
+}
+
+// InstanceName returns the current VM's instance ID string.
+//
+// Deprecated: Please use the context aware variant [InstanceNameWithContext].
+func InstanceName() (string, error) {
+	return defaultClient.InstanceNameWithContext(context.Background())
+}
+
+// InstanceNameWithContext returns the current VM's instance ID string.
+func InstanceNameWithContext(ctx context.Context) (string, error) {
+	return defaultClient.InstanceNameWithContext(ctx)
+}
+
+// Zone returns the current VM's zone, such as "us-central1-b".
+//
+// Deprecated: Please use the context aware variant [ZoneWithContext].
+func Zone() (string, error) {
+	return defaultClient.ZoneWithContext(context.Background())
+}
+
+// ZoneWithContext returns the current VM's zone, such as "us-central1-b".
+func ZoneWithContext(ctx context.Context) (string, error) {
+	return defaultClient.ZoneWithContext(ctx)
+}
+
+// InstanceAttributes calls Client.InstanceAttributesWithContext on the default client.
+//
+// Deprecated: Please use the context aware variant [InstanceAttributesWithContext].
+func InstanceAttributes() ([]string, error) {
+	return defaultClient.InstanceAttributesWithContext(context.Background())
+}
+
+// InstanceAttributesWithContext calls Client.InstanceAttributesWithContext on the default client.
+func InstanceAttributesWithContext(ctx context.Context) ([]string, error) {
+	return defaultClient.InstanceAttributesWithContext(ctx)
+}
+
+// ProjectAttributes calls Client.ProjectAttributesWithContext on the default client.
+//
+// Deprecated: Please use the context aware variant [ProjectAttributesWithContext].
+func ProjectAttributes() ([]string, error) {
+	return defaultClient.ProjectAttributesWithContext(context.Background())
+}
+
+// ProjectAttributesWithContext calls Client.ProjectAttributesWithContext on the default client.
+func ProjectAttributesWithContext(ctx context.Context) ([]string, error) {
+	return defaultClient.ProjectAttributesWithContext(ctx)
+}
+
+// InstanceAttributeValue calls Client.InstanceAttributeValueWithContext on the default client.
+//
+// Deprecated: Please use the context aware variant [InstanceAttributeValueWithContext].
+func InstanceAttributeValue(attr string) (string, error) {
+	// Uses context.Background; see the WithContext variant to control cancellation.
+	return defaultClient.InstanceAttributeValueWithContext(context.Background(), attr)
+}
+
+// InstanceAttributeValueWithContext calls Client.InstanceAttributeValueWithContext on the default client.
+func InstanceAttributeValueWithContext(ctx context.Context, attr string) (string, error) {
+	return defaultClient.InstanceAttributeValueWithContext(ctx, attr)
+}
+
+// ProjectAttributeValue calls Client.ProjectAttributeValueWithContext on the default client.
+//
+// Deprecated: Please use the context aware variant [ProjectAttributeValueWithContext].
+func ProjectAttributeValue(attr string) (string, error) {
+	return defaultClient.ProjectAttributeValueWithContext(context.Background(), attr)
+}
+
+// ProjectAttributeValueWithContext calls Client.ProjectAttributeValueWithContext on the default client.
+func ProjectAttributeValueWithContext(ctx context.Context, attr string) (string, error) {
+	return defaultClient.ProjectAttributeValueWithContext(ctx, attr)
+}
+
+// Scopes calls Client.ScopesWithContext on the default client.
+//
+// Deprecated: Please use the context aware variant [ScopesWithContext].
+func Scopes(serviceAccount string) ([]string, error) {
+	return defaultClient.ScopesWithContext(context.Background(), serviceAccount)
+}
+
+// ScopesWithContext calls Client.ScopesWithContext on the default client.
+func ScopesWithContext(ctx context.Context, serviceAccount string) ([]string, error) {
+	return defaultClient.ScopesWithContext(ctx, serviceAccount)
+}
+
+// strsContains reports whether s is present in ss.
+func strsContains(ss []string, s string) bool {
+	for _, v := range ss {
+		if v == s {
+			return true
+		}
+	}
+	return false
+}
+
+// A Client provides metadata.
+type Client struct {
+	// hc is the HTTP client used for metadata requests.
+	hc *http.Client
+	// logger records request/response details; NewWithOptions installs a
+	// no-op handler when no logger is provided, so it is never nil.
+	logger *slog.Logger
+}
+
+// Options for configuring a [Client].
+type Options struct {
+ // Client is the HTTP client used to make requests. Optional.
+ Client *http.Client
+ // Logger is used to log information about HTTP request and responses.
+ // If not provided, nothing will be logged. Optional.
+ Logger *slog.Logger
+}
+
+// NewClient returns a Client that can be used to fetch metadata.
+// Returns the client that uses the specified http.Client for HTTP requests.
+// If nil is specified, returns the default client.
+func NewClient(c *http.Client) *Client {
+ return NewWithOptions(&Options{
+ Client: c,
+ })
+}
+
+// NewWithOptions returns a Client that is configured with the provided Options.
+func NewWithOptions(opts *Options) *Client {
+ if opts == nil {
+ return defaultClient
+ }
+ client := opts.Client
+ if client == nil {
+ client = newDefaultHTTPClient()
+ }
+ logger := opts.Logger
+ if logger == nil {
+ logger = slog.New(noOpHandler{})
+ }
+ return &Client{hc: client, logger: logger}
+}
+
+// getETag returns a value from the metadata service as well as the associated ETag.
+// This func is otherwise equivalent to Get.
+//
+// Transient failures (5xx responses, retryable transport errors) are retried
+// with exponential backoff via metadataRetryer; a 404 maps to NotDefinedError
+// and any other non-200 status maps to *Error.
+func (c *Client) getETag(ctx context.Context, suffix string) (value, etag string, err error) {
+	// Using a fixed IP makes it very difficult to spoof the metadata service in
+	// a container, which is an important use-case for local testing of cloud
+	// deployments. To enable spoofing of the metadata service, the environment
+	// variable GCE_METADATA_HOST is first inspected to decide where metadata
+	// requests shall go.
+	host := os.Getenv(metadataHostEnv)
+	if host == "" {
+		// Using 169.254.169.254 instead of "metadata" here because Go
+		// binaries built with the "netgo" tag and without cgo won't
+		// know the search suffix for "metadata" is
+		// ".google.internal", and this IP address is documented as
+		// being stable anyway.
+		host = metadataIP
+	}
+	suffix = strings.TrimLeft(suffix, "/")
+	u := "http://" + host + "/computeMetadata/v1/" + suffix
+	req, err := http.NewRequestWithContext(ctx, "GET", u, nil)
+	if err != nil {
+		return "", "", err
+	}
+	req.Header.Set("Metadata-Flavor", "Google")
+	req.Header.Set("User-Agent", userAgent)
+	var res *http.Response
+	var reqErr error
+	var body []byte
+	retryer := newRetryer()
+	// Retry loop: the same request value is reused; a GET with a nil body is
+	// safe to re-Do without rewinding anything.
+	for {
+		c.logger.DebugContext(ctx, "metadata request", "request", httpRequest(req, nil))
+		res, reqErr = c.hc.Do(req)
+		var code int
+		if res != nil {
+			code = res.StatusCode
+			// The body must be fully read before Close so the connection can
+			// be reused, and so the final error/value can include it.
+			body, err = io.ReadAll(res.Body)
+			if err != nil {
+				res.Body.Close()
+				return "", "", err
+			}
+			c.logger.DebugContext(ctx, "metadata response", "response", httpResponse(res, body))
+			res.Body.Close()
+		}
+		if delay, shouldRetry := retryer.Retry(code, reqErr); shouldRetry {
+			// NOTE(review): the body was already closed above whenever
+			// res != nil, so this second Close is redundant (but harmless).
+			if res != nil && res.Body != nil {
+				res.Body.Close()
+			}
+			// sleep honors ctx cancellation, bounding total retry time.
+			if err := sleep(ctx, delay); err != nil {
+				return "", "", err
+			}
+			continue
+		}
+		break
+	}
+	if reqErr != nil {
+		return "", "", reqErr
+	}
+	if res.StatusCode == http.StatusNotFound {
+		return "", "", NotDefinedError(suffix)
+	}
+	if res.StatusCode != 200 {
+		return "", "", &Error{Code: res.StatusCode, Message: string(body)}
+	}
+	return string(body), res.Header.Get("Etag"), nil
+}
+
+// Get returns a value from the metadata service.
+// The suffix is appended to "http://${GCE_METADATA_HOST}/computeMetadata/v1/".
+//
+// If the GCE_METADATA_HOST environment variable is not defined, a default of
+// 169.254.169.254 will be used instead.
+//
+// If the requested metadata is not defined, the returned error will
+// be of type NotDefinedError.
+//
+// Deprecated: Please use the context aware variant [Client.GetWithContext].
+func (c *Client) Get(suffix string) (string, error) {
+ return c.GetWithContext(context.Background(), suffix)
+}
+
+// GetWithContext returns a value from the metadata service.
+// The suffix is appended to "http://${GCE_METADATA_HOST}/computeMetadata/v1/".
+//
+// If the GCE_METADATA_HOST environment variable is not defined, a default of
+// 169.254.169.254 will be used instead.
+//
+// If the requested metadata is not defined, the returned error will
+// be of type NotDefinedError.
+//
+// NOTE: Without an extra deadline in the context this call can take in the
+// worst case, with internal backoff retries, up to 15 seconds (e.g. when server
+// is responding slowly). Pass context with additional timeouts when needed.
+func (c *Client) GetWithContext(ctx context.Context, suffix string) (string, error) {
+ val, _, err := c.getETag(ctx, suffix)
+ return val, err
+}
+
+func (c *Client) getTrimmed(ctx context.Context, suffix string) (s string, err error) {
+ s, err = c.GetWithContext(ctx, suffix)
+ s = strings.TrimSpace(s)
+ return
+}
+
+func (c *Client) lines(ctx context.Context, suffix string) ([]string, error) {
+ j, err := c.GetWithContext(ctx, suffix)
+ if err != nil {
+ return nil, err
+ }
+ s := strings.Split(strings.TrimSpace(j), "\n")
+ for i := range s {
+ s[i] = strings.TrimSpace(s[i])
+ }
+ return s, nil
+}
+
+// ProjectID returns the current instance's project ID string.
+//
+// Deprecated: Please use the context aware variant [Client.ProjectIDWithContext].
+func (c *Client) ProjectID() (string, error) { return c.ProjectIDWithContext(context.Background()) }
+
+// ProjectIDWithContext returns the current instance's project ID string.
+func (c *Client) ProjectIDWithContext(ctx context.Context) (string, error) { return projID.get(ctx, c) }
+
+// NumericProjectID returns the current instance's numeric project ID.
+//
+// Deprecated: Please use the context aware variant [Client.NumericProjectIDWithContext].
+func (c *Client) NumericProjectID() (string, error) {
+ return c.NumericProjectIDWithContext(context.Background())
+}
+
+// NumericProjectIDWithContext returns the current instance's numeric project ID.
+func (c *Client) NumericProjectIDWithContext(ctx context.Context) (string, error) {
+ return projNum.get(ctx, c)
+}
+
+// InstanceID returns the current VM's numeric instance ID.
+//
+// Deprecated: Please use the context aware variant [Client.InstanceIDWithContext].
+func (c *Client) InstanceID() (string, error) {
+ return c.InstanceIDWithContext(context.Background())
+}
+
+// InstanceIDWithContext returns the current VM's numeric instance ID.
+func (c *Client) InstanceIDWithContext(ctx context.Context) (string, error) {
+ return instID.get(ctx, c)
+}
+
+// InternalIP returns the instance's primary internal IP address.
+//
+// Deprecated: Please use the context aware variant [Client.InternalIPWithContext].
+func (c *Client) InternalIP() (string, error) {
+ return c.InternalIPWithContext(context.Background())
+}
+
+// InternalIPWithContext returns the instance's primary internal IP address.
+func (c *Client) InternalIPWithContext(ctx context.Context) (string, error) {
+ return c.getTrimmed(ctx, "instance/network-interfaces/0/ip")
+}
+
+// Email returns the email address associated with the service account.
+//
+// Deprecated: Please use the context aware variant [Client.EmailWithContext].
+func (c *Client) Email(serviceAccount string) (string, error) {
+ return c.EmailWithContext(context.Background(), serviceAccount)
+}
+
+// EmailWithContext returns the email address associated with the service account.
+// The serviceAccount parameter default value (empty string or "default" value)
+// will use the instance's main account.
+func (c *Client) EmailWithContext(ctx context.Context, serviceAccount string) (string, error) {
+ if serviceAccount == "" {
+ serviceAccount = "default"
+ }
+ return c.getTrimmed(ctx, "instance/service-accounts/"+serviceAccount+"/email")
+}
+
+// ExternalIP returns the instance's primary external (public) IP address.
+//
+// Deprecated: Please use the context aware variant [Client.ExternalIPWithContext].
+func (c *Client) ExternalIP() (string, error) {
+ return c.ExternalIPWithContext(context.Background())
+}
+
+// ExternalIPWithContext returns the instance's primary external (public) IP address.
+func (c *Client) ExternalIPWithContext(ctx context.Context) (string, error) {
+ return c.getTrimmed(ctx, "instance/network-interfaces/0/access-configs/0/external-ip")
+}
+
+// Hostname returns the instance's hostname. This will be of the form
+// "<instanceID>.c.<projID>.internal".
+//
+// Deprecated: Please use the context aware variant [Client.HostnameWithContext].
+func (c *Client) Hostname() (string, error) {
+ return c.HostnameWithContext(context.Background())
+}
+
+// HostnameWithContext returns the instance's hostname. This will be of the form
+// "<instanceID>.c.<projID>.internal".
+func (c *Client) HostnameWithContext(ctx context.Context) (string, error) {
+ return c.getTrimmed(ctx, "instance/hostname")
+}
+
+// InstanceTags returns the list of user-defined instance tags.
+//
+// Deprecated: Please use the context aware variant [Client.InstanceTagsWithContext].
+func (c *Client) InstanceTags() ([]string, error) {
+ return c.InstanceTagsWithContext(context.Background())
+}
+
+// InstanceTagsWithContext returns the list of user-defined instance tags,
+// assigned when initially creating a GCE instance.
+func (c *Client) InstanceTagsWithContext(ctx context.Context) ([]string, error) {
+ var s []string
+ j, err := c.GetWithContext(ctx, "instance/tags")
+ if err != nil {
+ return nil, err
+ }
+ if err := json.NewDecoder(strings.NewReader(j)).Decode(&s); err != nil {
+ return nil, err
+ }
+ return s, nil
+}
+
+// InstanceName returns the current VM's instance ID string.
+//
+// Deprecated: Please use the context aware variant [Client.InstanceNameWithContext].
+func (c *Client) InstanceName() (string, error) {
+ return c.InstanceNameWithContext(context.Background())
+}
+
+// InstanceNameWithContext returns the current VM's instance ID string.
+func (c *Client) InstanceNameWithContext(ctx context.Context) (string, error) {
+ return c.getTrimmed(ctx, "instance/name")
+}
+
+// Zone returns the current VM's zone, such as "us-central1-b".
+//
+// Deprecated: Please use the context aware variant [Client.ZoneWithContext].
+func (c *Client) Zone() (string, error) {
+ return c.ZoneWithContext(context.Background())
+}
+
+// ZoneWithContext returns the current VM's zone, such as "us-central1-b".
+func (c *Client) ZoneWithContext(ctx context.Context) (string, error) {
+ zone, err := c.getTrimmed(ctx, "instance/zone")
+ // zone is of the form "projects/<projNum>/zones/<zoneName>".
+ if err != nil {
+ return "", err
+ }
+ return zone[strings.LastIndex(zone, "/")+1:], nil
+}
+
+// InstanceAttributes returns the list of user-defined attributes,
+// assigned when initially creating a GCE VM instance. The value of an
+// attribute can be obtained with InstanceAttributeValue.
+//
+// Deprecated: Please use the context aware variant [Client.InstanceAttributesWithContext].
+func (c *Client) InstanceAttributes() ([]string, error) {
+ return c.InstanceAttributesWithContext(context.Background())
+}
+
+// InstanceAttributesWithContext returns the list of user-defined attributes,
+// assigned when initially creating a GCE VM instance. The value of an
+// attribute can be obtained with InstanceAttributeValue.
+func (c *Client) InstanceAttributesWithContext(ctx context.Context) ([]string, error) {
+ return c.lines(ctx, "instance/attributes/")
+}
+
+// ProjectAttributes returns the list of user-defined attributes
+// applying to the project as a whole, not just this VM. The value of
+// an attribute can be obtained with ProjectAttributeValue.
+//
+// Deprecated: Please use the context aware variant [Client.ProjectAttributesWithContext].
+func (c *Client) ProjectAttributes() ([]string, error) {
+ return c.ProjectAttributesWithContext(context.Background())
+}
+
+// ProjectAttributesWithContext returns the list of user-defined attributes
+// applying to the project as a whole, not just this VM. The value of
+// an attribute can be obtained with ProjectAttributeValue.
+func (c *Client) ProjectAttributesWithContext(ctx context.Context) ([]string, error) {
+ return c.lines(ctx, "project/attributes/")
+}
+
+// InstanceAttributeValue returns the value of the provided VM
+// instance attribute.
+//
+// If the requested attribute is not defined, the returned error will
+// be of type NotDefinedError.
+//
+// InstanceAttributeValue may return ("", nil) if the attribute was
+// defined to be the empty string.
+//
+// Deprecated: Please use the context aware variant [Client.InstanceAttributeValueWithContext].
+func (c *Client) InstanceAttributeValue(attr string) (string, error) {
+ return c.InstanceAttributeValueWithContext(context.Background(), attr)
+}
+
+// InstanceAttributeValueWithContext returns the value of the provided VM
+// instance attribute.
+//
+// If the requested attribute is not defined, the returned error will
+// be of type NotDefinedError.
+//
+// InstanceAttributeValue may return ("", nil) if the attribute was
+// defined to be the empty string.
+func (c *Client) InstanceAttributeValueWithContext(ctx context.Context, attr string) (string, error) {
+ return c.GetWithContext(ctx, "instance/attributes/"+attr)
+}
+
+// ProjectAttributeValue returns the value of the provided
+// project attribute.
+//
+// If the requested attribute is not defined, the returned error will
+// be of type NotDefinedError.
+//
+// ProjectAttributeValue may return ("", nil) if the attribute was
+// defined to be the empty string.
+//
+// Deprecated: Please use the context aware variant [Client.ProjectAttributeValueWithContext].
+func (c *Client) ProjectAttributeValue(attr string) (string, error) {
+ return c.ProjectAttributeValueWithContext(context.Background(), attr)
+}
+
+// ProjectAttributeValueWithContext returns the value of the provided
+// project attribute.
+//
+// If the requested attribute is not defined, the returned error will
+// be of type NotDefinedError.
+//
+// ProjectAttributeValue may return ("", nil) if the attribute was
+// defined to be the empty string.
+func (c *Client) ProjectAttributeValueWithContext(ctx context.Context, attr string) (string, error) {
+ return c.GetWithContext(ctx, "project/attributes/"+attr)
+}
+
+// Scopes returns the service account scopes for the given account.
+// The account may be empty or the string "default" to use the instance's
+// main account.
+//
+// Deprecated: Please use the context aware variant [Client.ScopesWithContext].
+func (c *Client) Scopes(serviceAccount string) ([]string, error) {
+ return c.ScopesWithContext(context.Background(), serviceAccount)
+}
+
+// ScopesWithContext returns the service account scopes for the given account.
+// The account may be empty or the string "default" to use the instance's
+// main account.
+func (c *Client) ScopesWithContext(ctx context.Context, serviceAccount string) ([]string, error) {
+ if serviceAccount == "" {
+ serviceAccount = "default"
+ }
+ return c.lines(ctx, "instance/service-accounts/"+serviceAccount+"/scopes")
+}
+
+// Subscribe subscribes to a value from the metadata service.
+// The suffix is appended to "http://${GCE_METADATA_HOST}/computeMetadata/v1/".
+// The suffix may contain query parameters.
+//
+// Deprecated: Please use the context aware variant [Client.SubscribeWithContext].
+func (c *Client) Subscribe(suffix string, fn func(v string, ok bool) error) error {
+ return c.SubscribeWithContext(context.Background(), suffix, func(ctx context.Context, v string, ok bool) error { return fn(v, ok) })
+}
+
+// SubscribeWithContext subscribes to a value from the metadata service.
+// The suffix is appended to "http://${GCE_METADATA_HOST}/computeMetadata/v1/".
+// The suffix may contain query parameters.
+//
+// SubscribeWithContext calls fn with the latest metadata value indicated by the
+// provided suffix. If the metadata value is deleted, fn is called with the
+// empty string and ok false. Subscribe blocks until fn returns a non-nil error
+// or the value is deleted. Subscribe returns the error value returned from the
+// last call to fn, which may be nil when ok == false.
+func (c *Client) SubscribeWithContext(ctx context.Context, suffix string, fn func(ctx context.Context, v string, ok bool) error) error {
+	const failedSubscribeSleep = time.Second * 5
+
+	// First check to see if the metadata value exists at all.
+	val, lastETag, err := c.getETag(ctx, suffix)
+	if err != nil {
+		return err
+	}
+
+	// Deliver the initial value before entering the long-poll loop.
+	if err := fn(ctx, val, true); err != nil {
+		return err
+	}
+
+	ok := true
+	// Append the long-poll parameters, preserving any caller-supplied query.
+	if strings.ContainsRune(suffix, '?') {
+		suffix += "&wait_for_change=true&last_etag="
+	} else {
+		suffix += "?wait_for_change=true&last_etag="
+	}
+	for {
+		// Long-poll: the server holds the request until the value changes
+		// from the last observed ETag.
+		val, etag, err := c.getETag(ctx, suffix+url.QueryEscape(lastETag))
+		if err != nil {
+			if _, deleted := err.(NotDefinedError); !deleted {
+				// NOTE(review): this best-effort backoff uses time.Sleep and
+				// does not honor ctx cancellation — confirm upstream intent.
+				time.Sleep(failedSubscribeSleep)
+				continue // Retry on other errors.
+			}
+			// The value was deleted; report it to fn once with ok == false.
+			ok = false
+		}
+		lastETag = etag
+
+		if err := fn(ctx, val, ok); err != nil || !ok {
+			return err
+		}
+	}
+}
+
+// Error contains an error response from the server.
+type Error struct {
+ // Code is the HTTP response status code.
+ Code int
+ // Message is the server response message.
+ Message string
+}
+
+func (e *Error) Error() string {
+ return fmt.Sprintf("compute: Received %d `%s`", e.Code, e.Message)
+}
@@ -0,0 +1,114 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package metadata
+
+import (
+ "context"
+ "io"
+ "math/rand"
+ "net/http"
+ "time"
+)
+
+const (
+ maxRetryAttempts = 5
+)
+
+var (
+ syscallRetryable = func(error) bool { return false }
+)
+
+// defaultBackoff is basically equivalent to gax.Backoff without the need for
+// the dependency.
+type defaultBackoff struct {
+ max time.Duration
+ mul float64
+ cur time.Duration
+}
+
+// Pause returns the next backoff duration: a random value in (0, cur],
+// after which cur is multiplied by mul and capped at max. The jitter makes
+// concurrent retriers spread out instead of thundering in lockstep.
+func (b *defaultBackoff) Pause() time.Duration {
+	d := time.Duration(1 + rand.Int63n(int64(b.cur)))
+	b.cur = time.Duration(float64(b.cur) * b.mul)
+	if b.cur > b.max {
+		b.cur = b.max
+	}
+	return d
+}
+
+// sleep is the equivalent of gax.Sleep without the need for the dependency.
+func sleep(ctx context.Context, d time.Duration) error {
+ t := time.NewTimer(d)
+ select {
+ case <-ctx.Done():
+ t.Stop()
+ return ctx.Err()
+ case <-t.C:
+ return nil
+ }
+}
+
+func newRetryer() *metadataRetryer {
+ return &metadataRetryer{bo: &defaultBackoff{
+ cur: 100 * time.Millisecond,
+ max: 30 * time.Second,
+ mul: 2,
+ }}
+}
+
+type backoff interface {
+ Pause() time.Duration
+}
+
+type metadataRetryer struct {
+ bo backoff
+ attempts int
+}
+
+// Retry reports whether the attempt that ended with the given HTTP status
+// and/or transport error should be retried, and if so, how long to pause
+// first. It permits at most maxRetryAttempts retries per retryer.
+func (r *metadataRetryer) Retry(status int, err error) (time.Duration, bool) {
+	if status == http.StatusOK {
+		return 0, false
+	}
+	retryOk := shouldRetry(status, err)
+	if !retryOk {
+		return 0, false
+	}
+	if r.attempts == maxRetryAttempts {
+		return 0, false
+	}
+	r.attempts++
+	return r.bo.Pause(), true
+}
+
+// shouldRetry reports whether the HTTP status and/or transport error is
+// considered transient: any 5xx status, an unexpected EOF, an OS-level
+// retryable error (Linux ECONNRESET/ECONNREFUSED), or an error that reports
+// itself Temporary. Wrapped errors are unwrapped recursively so these checks
+// also apply to the underlying cause.
+func shouldRetry(status int, err error) bool {
+	if 500 <= status && status <= 599 {
+		return true
+	}
+	if err == io.ErrUnexpectedEOF {
+		return true
+	}
+	// Transient network errors should be retried.
+	if syscallRetryable(err) {
+		return true
+	}
+	if err, ok := err.(interface{ Temporary() bool }); ok {
+		if err.Temporary() {
+			return true
+		}
+	}
+	if err, ok := err.(interface{ Unwrap() error }); ok {
+		return shouldRetry(status, err.Unwrap())
+	}
+	return false
+}
@@ -0,0 +1,31 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build linux
+// +build linux
+
+package metadata
+
+import (
+ "errors"
+ "syscall"
+)
+
+func init() {
+ // Initialize syscallRetryable to return true on transient socket-level
+ // errors. These errors are specific to Linux.
+ syscallRetryable = func(err error) bool {
+ return errors.Is(err, syscall.ECONNRESET) || errors.Is(err, syscall.ECONNREFUSED)
+ }
+}
@@ -0,0 +1,26 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build !windows && !linux
+
+package metadata
+
+// systemInfoSuggestsGCE reports whether the local system (without
+// doing network requests) suggests that we're running on GCE. If this
+// returns true, testOnGCE tries a bit harder to reach its metadata
+// server.
+func systemInfoSuggestsGCE() bool {
+ // We don't currently have checks for other GOOS
+ return false
+}
@@ -0,0 +1,28 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build linux
+
+package metadata
+
+import (
+ "os"
+ "strings"
+)
+
+// systemInfoSuggestsGCE reports whether the local system (without doing
+// network requests) suggests that we're running on GCE, by checking the DMI
+// product name the kernel exposes under sysfs. Read errors are deliberately
+// ignored: an unreadable file simply yields an empty name and a false result.
+func systemInfoSuggestsGCE() bool {
+	b, _ := os.ReadFile("/sys/class/dmi/id/product_name")
+	name := strings.TrimSpace(string(b))
+	return name == "Google" || name == "Google Compute Engine"
+}
@@ -0,0 +1,38 @@
+// Copyright 2024 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//go:build windows
+
+package metadata
+
+import (
+ "strings"
+
+ "golang.org/x/sys/windows/registry"
+)
+
+// systemInfoSuggestsGCE reports whether the local system (without doing
+// network requests) suggests that we're running on GCE, by reading the
+// SystemProductName value from the Windows registry. Any registry failure
+// is treated as "not GCE".
+func systemInfoSuggestsGCE() bool {
+	k, err := registry.OpenKey(registry.LOCAL_MACHINE, `SYSTEM\HardwareConfig\Current`, registry.QUERY_VALUE)
+	if err != nil {
+		return false
+	}
+	defer k.Close()
+
+	s, _, err := k.GetStringValue("SystemProductName")
+	if err != nil {
+		return false
+	}
+	s = strings.TrimSpace(s)
+	return strings.HasPrefix(s, "Google")
+}
@@ -0,0 +1,849 @@
+# Release History
+
+## 1.17.0 (2025-01-07)
+
+### Features Added
+
+* Added field `OperationLocationResultPath` to `runtime.NewPollerOptions[T]` for LROs that use the `Operation-Location` pattern.
+* Support `encoding.TextMarshaler` and `encoding.TextUnmarshaler` interfaces in `arm.ResourceID`.
+
+## 1.16.0 (2024-10-17)
+
+### Features Added
+
+* Added field `Kind` to `runtime.StartSpanOptions` to allow a kind to be set when starting a span.
+
+### Bugs Fixed
+
+* `BearerTokenPolicy` now rewinds request bodies before retrying
+
+## 1.15.0 (2024-10-14)
+
+### Features Added
+
+* `BearerTokenPolicy` handles CAE claims challenges
+
+### Bugs Fixed
+
+* Omit the `ResponseError.RawResponse` field from JSON marshaling so instances can be marshaled.
+* Fixed an integer overflow in the retry policy.
+
+### Other Changes
+
+* Update dependencies.
+
+## 1.14.0 (2024-08-07)
+
+### Features Added
+
+* Added field `Attributes` to `runtime.StartSpanOptions` to simplify creating spans with attributes.
+
+### Other Changes
+
+* Include the HTTP verb and URL in `log.EventRetryPolicy` log entries so it's clear which operation is being retried.
+
+## 1.13.0 (2024-07-16)
+
+### Features Added
+
+- Added runtime.NewRequestFromRequest(), allowing for a policy.Request to be created from an existing *http.Request.
+
+## 1.12.0 (2024-06-06)
+
+### Features Added
+
+* Added field `StatusCodes` to `runtime.FetcherForNextLinkOptions` allowing for additional HTTP status codes indicating success.
+* Added func `NewUUID` to the `runtime` package for generating UUIDs.
+
+### Bugs Fixed
+
+* Fixed an issue that prevented pollers using the `Operation-Location` strategy from unmarshaling the final result in some cases.
+
+### Other Changes
+
+* Updated dependencies.
+
+## 1.11.1 (2024-04-02)
+
+### Bugs Fixed
+
+* Pollers that use the `Location` header won't consider `http.StatusRequestTimeout` a terminal failure.
+* `runtime.Poller[T].Result` won't consider non-terminal error responses as terminal.
+
+## 1.11.0 (2024-04-01)
+
+### Features Added
+
+* Added `StatusCodes` to `arm/policy.RegistrationOptions` to allow supporting non-standard HTTP status codes during registration.
+* Added field `InsecureAllowCredentialWithHTTP` to `azcore.ClientOptions` and dependent authentication pipeline policies.
+* Added type `MultipartContent` to the `streaming` package to support multipart/form payloads with custom Content-Type and file name.
+
+### Bugs Fixed
+
+* `runtime.SetMultipartFormData` won't try to stringify `[]byte` values.
+* Pollers that use the `Location` header won't consider `http.StatusTooManyRequests` a terminal failure.
+
+### Other Changes
+
+* Update dependencies.
+
+## 1.10.0 (2024-02-29)
+
+### Features Added
+
+* Added logging event `log.EventResponseError` that will contain the contents of `ResponseError.Error()` whenever an `azcore.ResponseError` is created.
+* Added `runtime.NewResponseErrorWithErrorCode` for creating an `azcore.ResponseError` with a caller-supplied error code.
+* Added type `MatchConditions` for use in conditional requests.
+
+### Bugs Fixed
+
+* Fixed a potential race condition between `NullValue` and `IsNullValue`.
+* `runtime.EncodeQueryParams` will escape semicolons before calling `url.ParseQuery`.
+
+### Other Changes
+
+* Update dependencies.
+
+## 1.9.2 (2024-02-06)
+
+### Bugs Fixed
+
+* `runtime.MarshalAsByteArray` and `runtime.MarshalAsJSON` will preserve the preexisting value of the `Content-Type` header.
+
+### Other Changes
+
+* Update to latest version of `internal`.
+
+## 1.9.1 (2023-12-11)
+
+### Bugs Fixed
+
+* The `retry-after-ms` and `x-ms-retry-after-ms` headers weren't being checked during retries.
+
+### Other Changes
+
+* Update dependencies.
+
+## 1.9.0 (2023-11-06)
+
+### Breaking Changes
+> These changes affect only code written against previous beta versions of `v1.7.0` and `v1.8.0`
+* The function `NewTokenCredential` has been removed from the `fake` package. Use a literal `&fake.TokenCredential{}` instead.
+* The field `TracingNamespace` in `runtime.PipelineOptions` has been replaced by `TracingOptions`.
+
+### Bugs Fixed
+
+* Fixed an issue that could cause some allowed HTTP header values to not show up in logs.
+* Include error text instead of error type in traces when the transport returns an error.
+* Fixed an issue that could cause an HTTP/2 request to hang when the TCP connection becomes unresponsive.
+* Block key and SAS authentication for non TLS protected endpoints.
+* Passing a `nil` credential value will no longer cause a panic. Instead, the authentication is skipped.
+* Calling `Error` on a zero-value `azcore.ResponseError` will no longer panic.
+* Fixed an issue in `fake.PagerResponder[T]` that would cause a trailing error to be omitted when iterating over pages.
+* Context values created by `azcore` will no longer flow across disjoint HTTP requests.
+
+### Other Changes
+
+* Skip generating trace info for no-op tracers.
+* The `clientName` parameter in client constructors has been renamed to `moduleName`.
+
+## 1.9.0-beta.1 (2023-10-05)
+
+### Other Changes
+
+* The beta features for tracing and fakes have been reinstated.
+
+## 1.8.0 (2023-10-05)
+
+### Features Added
+
+* This includes the following features from `v1.8.0-beta.N` releases.
+ * Claims and CAE for authentication.
+ * New `messaging` package.
+ * Various helpers in the `runtime` package.
+ * Deprecation of `runtime.With*` funcs and their replacements in the `policy` package.
+* Added types `KeyCredential` and `SASCredential` to the `azcore` package.
+ * Includes their respective constructor functions.
+* Added types `KeyCredentialPolicy` and `SASCredentialPolicy` to the `azcore/runtime` package.
+ * Includes their respective constructor functions and options types.
+
+### Breaking Changes
+> These changes affect only code written against beta versions of `v1.8.0`
+* The beta features for tracing and fakes have been omitted for this release.
+
+### Bugs Fixed
+
+* Fixed an issue that could cause some ARM RPs to not be automatically registered.
+* Block bearer token authentication for non TLS protected endpoints.
+
+### Other Changes
+
+* Updated dependencies.
+
+## 1.8.0-beta.3 (2023-09-07)
+
+### Features Added
+
+* Added function `FetcherForNextLink` and `FetcherForNextLinkOptions` to the `runtime` package to centralize creation of `Pager[T].Fetcher` from a next link URL.
+
+### Bugs Fixed
+
+* Suppress creating spans for nested SDK API calls. The HTTP span will be a child of the outer API span.
+
+### Other Changes
+
+* The following functions in the `runtime` package are now exposed from the `policy` package, and the `runtime` versions have been deprecated.
+ * `WithCaptureResponse`
+ * `WithHTTPHeader`
+ * `WithRetryOptions`
+
+## 1.7.2 (2023-09-06)
+
+### Bugs Fixed
+
+* Fix default HTTP transport to work in WASM modules.
+
+## 1.8.0-beta.2 (2023-08-14)
+
+### Features Added
+
+* Added function `SanitizePagerPollerPath` to the `server` package to centralize sanitization and formalize the contract.
+* Added `TokenRequestOptions.EnableCAE` to indicate whether to request a CAE token.
+
+### Breaking Changes
+
+> This change affects only code written against beta version `v1.8.0-beta.1`.
+* `messaging.CloudEvent` deserializes JSON objects as `[]byte`, instead of `json.RawMessage`. See the documentation for CloudEvent.Data for more information.
+
+> This change affects only code written against beta versions `v1.7.0-beta.2` and `v1.8.0-beta.1`.
+* Removed parameter from method `Span.End()` and its type `tracing.SpanEndOptions`. This API GA'ed in `v1.2.0` so we cannot change it.
+
+### Bugs Fixed
+
+* Propagate any query parameters when constructing a fake poller and/or injecting next links.
+
+## 1.7.1 (2023-08-14)
+
+### Bugs Fixed
+
+* Enable TLS renegotiation in the default transport policy.
+
+## 1.8.0-beta.1 (2023-07-12)
+
+### Features Added
+
+- `messaging/CloudEvent` allows you to serialize/deserialize CloudEvents, as described in the CloudEvents 1.0 specification: [link](https://github.com/cloudevents/spec)
+
+### Other Changes
+
+* The beta features for CAE, tracing, and fakes have been reinstated.
+
+## 1.7.0 (2023-07-12)
+
+### Features Added
+* Added method `WithClientName()` to type `azcore.Client` to support shallow cloning of a client with a new name used for tracing.
+
+### Breaking Changes
+> These changes affect only code written against beta versions v1.7.0-beta.1 or v1.7.0-beta.2
+* The beta features for CAE, tracing, and fakes have been omitted for this release.
+
+## 1.7.0-beta.2 (2023-06-06)
+
+### Breaking Changes
+> These changes affect only code written against beta version v1.7.0-beta.1
+* Method `SpanFromContext()` on type `tracing.Tracer` had the `bool` return value removed.
+ * This includes the field `SpanFromContext` in supporting type `tracing.TracerOptions`.
+* Method `AddError()` has been removed from type `tracing.Span`.
+* Method `Span.End()` now requires an argument of type `*tracing.SpanEndOptions`.
+
+## 1.6.1 (2023-06-06)
+
+### Bugs Fixed
+* Fixed an issue in `azcore.NewClient()` and `arm.NewClient()` that could cause an incorrect module name to be used in telemetry.
+
+### Other Changes
+* This version contains all bug fixes from `v1.7.0-beta.1`
+
+## 1.7.0-beta.1 (2023-05-24)
+
+### Features Added
+* Restored CAE support for ARM clients.
+* Added supporting features to enable distributed tracing.
+ * Added func `runtime.StartSpan()` for use by SDKs to start spans.
+ * Added method `WithContext()` to `runtime.Request` to support shallow cloning with a new context.
+ * Added field `TracingNamespace` to `runtime.PipelineOptions`.
+ * Added field `Tracer` to `runtime.NewPollerOptions` and `runtime.NewPollerFromResumeTokenOptions` types.
+ * Added field `SpanFromContext` to `tracing.TracerOptions`.
+ * Added methods `Enabled()`, `SetAttributes()`, and `SpanFromContext()` to `tracing.Tracer`.
+ * Added supporting pipeline policies to include HTTP spans when creating clients.
+* Added package `fake` to support generated fakes packages in SDKs.
+ * The package contains public surface area exposed by fake servers and supporting APIs intended only for use by the fake server implementations.
+ * Added an internal fake poller implementation.
+
+### Bugs Fixed
+* Retry policy always clones the underlying `*http.Request` before invoking the next policy.
+* Added some non-standard error codes to the list of error codes for unregistered resource providers.
+
+## 1.6.0 (2023-05-04)
+
+### Features Added
+* Added support for ARM cross-tenant authentication. Set the `AuxiliaryTenants` field of `arm.ClientOptions` to enable.
+* Added `TenantID` field to `policy.TokenRequestOptions`.
+
+## 1.5.0 (2023-04-06)
+
+### Features Added
+* Added `ShouldRetry` to `policy.RetryOptions` for finer-grained control over when to retry.
+
+### Breaking Changes
+> These changes affect only code written against a beta version such as v1.5.0-beta.1
+> These features will return in v1.6.0-beta.1.
+* Removed `TokenRequestOptions.Claims` and `.TenantID`
+* Removed ARM client support for CAE and cross-tenant auth.
+
+### Bugs Fixed
+* Added non-conformant LRO terminal states `Cancelled` and `Completed`.
+
+### Other Changes
+* Updated to latest `internal` module.
+
+## 1.5.0-beta.1 (2023-03-02)
+
+### Features Added
+* This release includes the features added in v1.4.0-beta.1
+
+## 1.4.0 (2023-03-02)
+> This release doesn't include features added in v1.4.0-beta.1. They will return in v1.5.0-beta.1.
+
+### Features Added
+* Add `Clone()` method for `arm/policy.ClientOptions`.
+
+### Bugs Fixed
+* ARM's RP registration policy will no longer swallow unrecognized errors.
+* Fixed an issue in `runtime.NewPollerFromResumeToken()` when resuming a `Poller` with a custom `PollingHandler`.
+* Fixed wrong policy copy in `arm/runtime.NewPipeline()`.
+
+## 1.4.0-beta.1 (2023-02-02)
+
+### Features Added
+* Added support for ARM cross-tenant authentication. Set the `AuxiliaryTenants` field of `arm.ClientOptions` to enable.
+* Added `Claims` and `TenantID` fields to `policy.TokenRequestOptions`.
+* ARM bearer token policy handles CAE challenges.
+
+## 1.3.1 (2023-02-02)
+
+### Other Changes
+* Update dependencies to latest versions.
+
+## 1.3.0 (2023-01-06)
+
+### Features Added
+* Added `BearerTokenOptions.AuthorizationHandler` to enable extending `runtime.BearerTokenPolicy`
+ with custom authorization logic
+* Added `Client` types and matching constructors to the `azcore` and `arm` packages. These represent a basic client for HTTP and ARM respectively.
+
+### Other Changes
+* Updated `internal` module to latest version.
+* `policy/Request.SetBody()` allows replacing a request's body with an empty one
+
+## 1.2.0 (2022-11-04)
+
+### Features Added
+* Added `ClientOptions.APIVersion` field, which overrides the default version a client
+ requests of the service, if the client supports this (all ARM clients do).
+* Added package `tracing` that contains the building blocks for distributed tracing.
+* Added field `TracingProvider` to type `policy.ClientOptions` that will be used to set the per-client tracing implementation.
+
+### Bugs Fixed
+* Fixed an issue in `runtime.SetMultipartFormData` to properly handle slices of `io.ReadSeekCloser`.
+* Fixed the MaxRetryDelay default to be 60s.
+* Failure to poll the state of an LRO will now return an `*azcore.ResponseError` for poller types that require this behavior.
+* Fixed a bug in `runtime.NewPipeline` that would cause pipeline-specified allowed headers and query parameters to be lost.
+
+### Other Changes
+* Retain contents of read-only fields when sending requests.
+
+## 1.1.4 (2022-10-06)
+
+### Bugs Fixed
+* Don't retry a request if the `Retry-After` delay is greater than the configured `RetryOptions.MaxRetryDelay`.
+* `runtime.JoinPaths`: do not unconditionally add a forward slash before the query string
+
+### Other Changes
+* Removed logging URL from retry policy as it's redundant.
+* Retry policy logs when it exits due to a non-retriable status code.
+
+## 1.1.3 (2022-09-01)
+
+### Bugs Fixed
+* Adjusted the initial retry delay to 800ms per the Azure SDK guidelines.
+
+## 1.1.2 (2022-08-09)
+
+### Other Changes
+* Fixed various doc bugs.
+
+## 1.1.1 (2022-06-30)
+
+### Bugs Fixed
+* Avoid polling when a RELO LRO synchronously terminates.
+
+## 1.1.0 (2022-06-03)
+
+### Other Changes
+* The one-second floor for `Frequency` when calling `PollUntilDone()` has been removed when running tests.
+
+## 1.0.0 (2022-05-12)
+
+### Features Added
+* Added interface `runtime.PollingHandler` to support custom poller implementations.
+ * Added field `PollingHandler` of this type to `runtime.NewPollerOptions[T]` and `runtime.NewPollerFromResumeTokenOptions[T]`.
+
+### Breaking Changes
+* Renamed `cloud.Configuration.LoginEndpoint` to `.ActiveDirectoryAuthorityHost`
+* Renamed `cloud.AzurePublicCloud` to `cloud.AzurePublic`
+* Removed `AuxiliaryTenants` field from `arm/ClientOptions` and `arm/policy/BearerTokenOptions`
+* Removed `TokenRequestOptions.TenantID`
+* `Poller[T].PollUntilDone()` now takes an `options *PollUntilDoneOptions` param instead of `freq time.Duration`
+* Removed `arm/runtime.Poller[T]`, `arm/runtime.NewPoller[T]()` and `arm/runtime.NewPollerFromResumeToken[T]()`
+* Removed `arm/runtime.FinalStateVia` and related `const` values
+* Renamed `runtime.PageProcessor` to `runtime.PagingHandler`
+* The `arm/runtime.ProviderRepsonse` and `arm/runtime.Provider` types are no longer exported.
+* Renamed `NewRequestIdPolicy()` to `NewRequestIDPolicy()`
+* `TokenCredential.GetToken` now returns `AccessToken` by value.
+
+### Bugs Fixed
+* When per-try timeouts are enabled, only cancel the context after the body has been read and closed.
+* The `Operation-Location` poller now properly handles `final-state-via` values.
+* Improvements in `runtime.Poller[T]`
+ * `Poll()` shouldn't cache errors, allowing for additional retries when in a non-terminal state.
+ * `Result()` will cache the terminal result or error but not transient errors, allowing for additional retries.
+
+### Other Changes
+* Updated to latest `internal` module and absorbed breaking changes.
+ * Use `temporal.Resource` and deleted copy.
+* The internal poller implementation has been refactored.
+ * The implementation in `internal/pollers/poller.go` has been merged into `runtime/poller.go` with some slight modification.
+ * The internal poller types had their methods updated to conform to the `runtime.PollingHandler` interface.
+ * The creation of resume tokens has been refactored so that implementers of `runtime.PollingHandler` don't need to know about it.
+* `NewPipeline()` places policies from `ClientOptions` after policies from `PipelineOptions`
+* Default User-Agent headers no longer include `azcore` version information
+
+## 0.23.1 (2022-04-14)
+
+### Bugs Fixed
+* Include XML header when marshalling XML content.
+* Handle XML namespaces when searching for error code.
+* Handle `odata.error` when searching for error code.
+
+## 0.23.0 (2022-04-04)
+
+### Features Added
+* Added `runtime.Pager[T any]` and `runtime.Poller[T any]` supporting types for central, generic, implementations.
+* Added `cloud` package with a new API for cloud configuration
+* Added `FinalStateVia` field to `runtime.NewPollerOptions[T any]` type.
+
+### Breaking Changes
+* Removed the `Poller` type-alias to the internal poller implementation.
+* Added `Ptr[T any]` and `SliceOfPtrs[T any]` in the `to` package and removed all non-generic implementations.
+* `NullValue` and `IsNullValue` now take a generic type parameter instead of an interface func parameter.
+* Replaced `arm.Endpoint` with `cloud` API
+ * Removed the `endpoint` parameter from `NewRPRegistrationPolicy()`
+ * `arm/runtime.NewPipeline()` and `.NewRPRegistrationPolicy()` now return an `error`
+* Refactored `NewPoller` and `NewPollerFromResumeToken` funcs in `arm/runtime` and `runtime` packages.
+ * Removed the `pollerID` parameter as it's no longer required.
+ * Created optional parameter structs and moved optional parameters into them.
+* Changed `FinalStateVia` field to a `const` type.
+
+### Other Changes
+* Converted expiring resource and dependent types to use generics.
+
+## 0.22.0 (2022-03-03)
+
+### Features Added
+* Added header `WWW-Authenticate` to the default allow-list of headers for logging.
+* Added a pipeline policy that enables the retrieval of HTTP responses from API calls.
+ * Added `runtime.WithCaptureResponse` to enable the policy at the API level (off by default).
+
+### Breaking Changes
+* Moved `WithHTTPHeader` and `WithRetryOptions` from the `policy` package to the `runtime` package.
+
+## 0.21.1 (2022-02-04)
+
+### Bugs Fixed
+* Restore response body after reading in `Poller.FinalResponse()`. (#16911)
+* Fixed bug in `NullValue` that could lead to incorrect comparisons for empty maps/slices (#16969)
+
+### Other Changes
+* `BearerTokenPolicy` is more resilient to transient authentication failures. (#16789)
+
+## 0.21.0 (2022-01-11)
+
+### Features Added
+* Added `AllowedHeaders` and `AllowedQueryParams` to `policy.LogOptions` to control which headers and query parameters are written to the logger.
+* Added `azcore.ResponseError` type which is returned from APIs when a non-success HTTP status code is received.
+
+### Breaking Changes
+* Moved `[]policy.Policy` parameters of `arm/runtime.NewPipeline` and `runtime.NewPipeline` into a new struct, `runtime.PipelineOptions`
+* Renamed `arm/ClientOptions.Host` to `.Endpoint`
+* Moved `Request.SkipBodyDownload` method to function `runtime.SkipBodyDownload`
+* Removed `azcore.HTTPResponse` interface type
+* `arm.NewPoller()` and `runtime.NewPoller()` no longer require an `eu` parameter
+* `runtime.NewResponseError()` no longer requires an `error` parameter
+
+## 0.20.0 (2021-10-22)
+
+### Breaking Changes
+* Removed `arm.Connection`
+* Removed `azcore.Credential` and `.NewAnonymousCredential()`
+ * `NewRPRegistrationPolicy` now requires an `azcore.TokenCredential`
+* `runtime.NewPipeline` has a new signature that simplifies implementing custom authentication
+* `arm/runtime.RegistrationOptions` embeds `policy.ClientOptions`
+* Contents in the `log` package have been slightly renamed.
+* Removed `AuthenticationOptions` in favor of `policy.BearerTokenOptions`
+* Changed parameters for `NewBearerTokenPolicy()`
+* Moved policy config options out of `arm/runtime` and into `arm/policy`
+
+### Features Added
+* Updating Documentation
+* Added string typedef `arm.Endpoint` to provide a hint toward expected ARM client endpoints
+* `azcore.ClientOptions` contains common pipeline configuration settings
+* Added support for multi-tenant authorization in `arm/runtime`
+* Require one second minimum when calling `PollUntilDone()`
+
+### Bugs Fixed
+* Fixed a potential panic when creating the default Transporter.
+* Close LRO initial response body when creating a poller.
+* Fixed a panic when recursively cloning structs that contain time.Time.
+
+## 0.19.0 (2021-08-25)
+
+### Breaking Changes
+* Split content out of `azcore` into various packages. The intent is to separate content based on its usage (common, uncommon, SDK authors).
+ * `azcore` has all core functionality.
+ * `log` contains facilities for configuring in-box logging.
+ * `policy` is used for configuring pipeline options and creating custom pipeline policies.
+ * `runtime` contains various helpers used by SDK authors and generated content.
+ * `streaming` has helpers for streaming IO operations.
+* `NewTelemetryPolicy()` now requires module and version parameters and the `Value` option has been removed.
+ * As a result, the `Request.Telemetry()` method has been removed.
+* The telemetry policy now includes the SDK prefix `azsdk-go-` so callers no longer need to provide it.
+* The `*http.Request` in `runtime.Request` is no longer anonymously embedded. Use the `Raw()` method to access it.
+* The `UserAgent` and `Version` constants have been made internal, `Module` and `Version` respectively.
+
+### Bugs Fixed
+* Fixed an issue in the retry policy where the request body could be overwritten after a rewind.
+
+### Other Changes
+* Moved modules `armcore` and `to` content into `arm` and `to` packages respectively.
+ * The `Pipeline()` method on `armcore.Connection` has been replaced by `NewPipeline()` in `arm.Connection`. It takes module and version parameters used by the telemetry policy.
+* Poller logic has been consolidated across ARM and core implementations.
+ * This required some changes to the internal interfaces for core pollers.
+* The core poller types have been improved, including more logging and test coverage.
+
+## 0.18.1 (2021-08-20)
+
+### Features Added
+* Adds an `ETag` type for comparing etags and handling etags on requests
+* Simplifies the `requestBodyProgess` and `responseBodyProgress` into a single `progress` object
+
+### Bugs Fixed
+* `JoinPaths` will preserve query parameters encoded in the `root` url.
+
+### Other Changes
+* Bumps dependency on `internal` module to the latest version (v0.7.0)
+
+## 0.18.0 (2021-07-29)
+### Features Added
+* Replaces methods from Logger type with two package methods for interacting with the logging functionality.
+* `azcore.SetClassifications` replaces `azcore.Logger().SetClassifications`
+* `azcore.SetListener` replaces `azcore.Logger().SetListener`
+
+### Breaking Changes
+* Removes `Logger` type from `azcore`
+
+
+## 0.17.0 (2021-07-27)
+### Features Added
+* Adding TenantID to TokenRequestOptions (https://github.com/Azure/azure-sdk-for-go/pull/14879)
+* Adding AuxiliaryTenants to AuthenticationOptions (https://github.com/Azure/azure-sdk-for-go/pull/15123)
+
+### Breaking Changes
+* Rename `AnonymousCredential` to `NewAnonymousCredential` (https://github.com/Azure/azure-sdk-for-go/pull/15104)
+* rename `AuthenticationPolicyOptions` to `AuthenticationOptions` (https://github.com/Azure/azure-sdk-for-go/pull/15103)
+* Make Header constants private (https://github.com/Azure/azure-sdk-for-go/pull/15038)
+
+
+## 0.16.2 (2021-05-26)
+### Features Added
+* Improved support for byte arrays [#14715](https://github.com/Azure/azure-sdk-for-go/pull/14715)
+
+
+## 0.16.1 (2021-05-19)
+### Features Added
+* Add license.txt to azcore module [#14682](https://github.com/Azure/azure-sdk-for-go/pull/14682)
+
+
+## 0.16.0 (2021-05-07)
+### Features Added
+* Remove extra `*` in UnmarshalAsByteArray() [#14642](https://github.com/Azure/azure-sdk-for-go/pull/14642)
+
+
+## 0.15.1 (2021-05-06)
+### Features Added
+* Cache the original request body on Request [#14634](https://github.com/Azure/azure-sdk-for-go/pull/14634)
+
+
+## 0.15.0 (2021-05-05)
+### Features Added
+* Add support for null map and slice
+* Export `Response.Payload` method
+
+### Breaking Changes
+* remove `Response.UnmarshalError` as it's no longer required
+
+
+## 0.14.5 (2021-04-23)
+### Features Added
+* Add `UnmarshalError()` on `azcore.Response`
+
+
+## 0.14.4 (2021-04-22)
+### Features Added
+* Support for basic LRO polling
+* Added type `LROPoller` and supporting types for basic polling on long running operations.
+* rename poller param and added doc comment
+
+### Bugs Fixed
+* Fixed content type detection bug in logging.
+
+
+## 0.14.3 (2021-03-29)
+### Features Added
+* Add support for multi-part form data
+* Added method `WriteMultipartFormData()` to Request.
+
+
+## 0.14.2 (2021-03-17)
+### Features Added
+* Add support for encoding JSON null values
+* Adds `NullValue()` and `IsNullValue()` functions for setting and detecting sentinel values used for encoding a JSON null.
+* Documentation fixes
+
+### Bugs Fixed
+* Fixed improper error wrapping
+
+
+## 0.14.1 (2021-02-08)
+### Features Added
+* Add `Pager` and `Poller` interfaces to azcore
+
+
+## 0.14.0 (2021-01-12)
+### Features Added
+* Accept zero-value options for default values
+* Specify zero-value options structs to accept default values.
+* Remove `DefaultXxxOptions()` methods.
+* Do not silently change TryTimeout on negative values
+* make per-try timeout opt-in
+
+
+## 0.13.4 (2020-11-20)
+### Features Added
+* Include telemetry string in User Agent
+
+
+## 0.13.3 (2020-11-20)
+### Features Added
+* Updating response body handling on `azcore.Response`
+
+
+## 0.13.2 (2020-11-13)
+### Features Added
+* Remove implementation of stateless policies as first-class functions.
+
+
+## 0.13.1 (2020-11-05)
+### Features Added
+* Add `Telemetry()` method to `azcore.Request()`
+
+
+## 0.13.0 (2020-10-14)
+### Features Added
+* Rename `log` to `logger` to avoid name collision with the log package.
+* Documentation improvements
+* Simplified `DefaultHTTPClientTransport()` implementation
+
+
+## 0.12.1 (2020-10-13)
+### Features Added
+* Update `internal` module dependence to `v0.5.0`
+
+
+## 0.12.0 (2020-10-08)
+### Features Added
+* Removed storage specific content
+* Removed internal content to prevent API clutter
+* Refactored various policy options to conform with our options pattern
+
+
+## 0.11.0 (2020-09-22)
+### Features Added
+
+* Removed `LogError` and `LogSlowResponse`.
+* Renamed `options` in `RequestLogOptions`.
+* Updated `NewRequestLogPolicy()` to follow standard pattern for options.
+* Refactored `requestLogPolicy.Do()` per above changes.
+* Cleaned up/added logging in retry policy.
+* Export `NewResponseError()`
+* Fix `RequestLogOptions` comment
+
+
+## 0.10.1 (2020-09-17)
+### Features Added
+* Add default console logger
+* Default console logger writes to stderr. To enable it, set env var `AZURE_SDK_GO_LOGGING` to the value 'all'.
+* Added `Logger.Writef()` to reduce the need for `ShouldLog()` checks.
+* Add `LogLongRunningOperation`
+
+
+## 0.10.0 (2020-09-10)
+### Features Added
+* The `request` and `transport` interfaces have been refactored to align with the patterns in the standard library.
+* `NewRequest()` now uses `http.NewRequestWithContext()` and performs additional validation, it also requires a context parameter.
+* The `Policy` and `Transport` interfaces have had their context parameter removed as the context is associated with the underlying `http.Request`.
+* `Pipeline.Do()` will validate the HTTP request before sending it through the pipeline, avoiding retries on a malformed request.
+* The `Retrier` interface has been replaced with the `NonRetriableError` interface, and the retry policy updated to test for this.
+* `Request.SetBody()` now requires a content type parameter for setting the request's MIME type.
+* moved path concatenation into `JoinPaths()` func
+
+
+## 0.9.6 (2020-08-18)
+### Features Added
+* Improvements to body download policy
+* Always download the response body for error responses, i.e. HTTP status codes >= 400.
+* Simplify variable declarations
+
+
+## 0.9.5 (2020-08-11)
+### Features Added
+* Set the Content-Length header in `Request.SetBody`
+
+
+## 0.9.4 (2020-08-03)
+### Features Added
+* Fix cancellation of per try timeout
+* Per try timeout is used to ensure that an HTTP operation doesn't take too long, e.g. that a GET on some URL doesn't take an inordinant amount of time.
+* Once the HTTP request returns, the per try timeout should be cancelled, not when the response has been read to completion.
+* Do not drain response body if there are no more retries
+* Do not retry non-idempotent operations when body download fails
+
+
+## 0.9.3 (2020-07-28)
+### Features Added
+* Add support for custom HTTP request headers
+* Inserts an internal policy into the pipeline that can extract HTTP header values from the caller's context, adding them to the request.
+* Use `azcore.WithHTTPHeader` to add HTTP headers to a context.
+* Remove method specific to Go 1.14
+
+
+## 0.9.2 (2020-07-28)
+### Features Added
+* Omit read-only content from request payloads
+* If any field in a payload's object graph contains `azure:"ro"`, make a clone of the object graph, omitting all fields with this annotation.
+* Verify no fields were dropped
+* Handle embedded struct types
+* Added test for cloning by value
+* Add messages to failures
+
+
+## 0.9.1 (2020-07-22)
+### Features Added
+* Updated dependency on internal module to fix race condition.
+
+
+## 0.9.0 (2020-07-09)
+### Features Added
+* Add `HTTPResponse` interface to be used by callers to access the raw HTTP response from an error in the event of an API call failure.
+* Updated `sdk/internal` dependency to latest version.
+* Rename package alias
+
+
+## 0.8.2 (2020-06-29)
+### Features Added
+* Added missing documentation comments
+
+### Bugs Fixed
+* Fixed a bug in body download policy.
+
+
+## 0.8.1 (2020-06-26)
+### Features Added
+* Miscellaneous clean-up reported by linters
+
+
+## 0.8.0 (2020-06-01)
+### Features Added
+* Differentiate between standard and URL encoding.
+
+
+## 0.7.1 (2020-05-27)
+### Features Added
+* Add support for base64 encoding and decoding of payloads.
+
+
+## 0.7.0 (2020-05-12)
+### Features Added
+* Change `RetryAfter()` to a function.
+
+
+## 0.6.0 (2020-04-29)
+### Features Added
+* Updating `RetryAfter` to only return the duration in the RetryAfter header
+
+
+## 0.5.0 (2020-03-23)
+### Features Added
+* Export `TransportFunc`
+
+### Breaking Changes
+* Removed `IterationDone`
+
+
+## 0.4.1 (2020-02-25)
+### Features Added
+* Ensure per-try timeout is properly cancelled
+* Explicitly call cancel the per-try timeout when the response body has been read/closed by the body download policy.
+* When the response body is returned to the caller for reading/closing, wrap it in a `responseBodyReader` that will cancel the timeout when the body is closed.
+* `Logger.Should()` will return false if no listener is set.
+
+
+## 0.4.0 (2020-02-18)
+### Features Added
+* Enable custom `RetryOptions` to be specified per API call
+* Added `WithRetryOptions()` that adds a custom `RetryOptions` to the provided context, allowing custom settings per API call.
+* Remove 429 from the list of default HTTP status codes for retry.
+* Change StatusCodesForRetry to a slice so consumers can append to it.
+* Added support for retry-after in HTTP-date format.
+* Cleaned up some comments specific to storage.
+* Remove `Request.SetQueryParam()`
+* Renamed `MaxTries` to `MaxRetries`
+
+## 0.3.0 (2020-01-16)
+### Features Added
+* Added `DefaultRetryOptions` to create initialized default options.
+
+### Breaking Changes
+* Removed `Response.CheckStatusCode()`
+
+
+## 0.2.0 (2020-01-15)
+### Features Added
+* Add support for marshalling and unmarshalling JSON
+* Removed `Response.Payload` field
+* Exit early when unmarshalling if there is no payload
+
+
+## 0.1.0 (2020-01-10)
+### Features Added
+* Initial release
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Microsoft Corporation.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE
@@ -0,0 +1,39 @@
+# Azure Core Client Module for Go
+
+[](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azcore)
+[](https://dev.azure.com/azure-sdk/public/_build/latest?definitionId=1843&branchName=main)
+[](https://img.shields.io/azure-devops/coverage/azure-sdk/public/1843/main)
+
+The `azcore` module provides a set of common interfaces and types for Go SDK client modules.
+These modules follow the [Azure SDK Design Guidelines for Go](https://azure.github.io/azure-sdk/golang_introduction.html).
+
+## Getting started
+
+This project uses [Go modules](https://github.com/golang/go/wiki/Modules) for versioning and dependency management.
+
+Typically, you will not need to explicitly install `azcore` as it will be installed as a client module dependency.
+To add the latest version to your `go.mod` file, execute the following command.
+
+```bash
+go get github.com/Azure/azure-sdk-for-go/sdk/azcore
+```
+
+General documentation and examples can be found on [pkg.go.dev](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azcore).
+
+## Contributing
+This project welcomes contributions and suggestions. Most contributions require
+you to agree to a Contributor License Agreement (CLA) declaring that you have
+the right to, and actually do, grant us the rights to use your contribution.
+For details, visit [https://cla.microsoft.com](https://cla.microsoft.com).
+
+When you submit a pull request, a CLA-bot will automatically determine whether
+you need to provide a CLA and decorate the PR appropriately (e.g., label,
+comment). Simply follow the instructions provided by the bot. You will only
+need to do this once across all repos using our CLA.
+
+This project has adopted the
+[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information, see the
+[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
+or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any
+additional questions or comments.
@@ -0,0 +1,239 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package resource
+
+import (
+ "fmt"
+ "strings"
+)
+
+const (
+ providersKey = "providers"
+ subscriptionsKey = "subscriptions"
+ resourceGroupsLowerKey = "resourcegroups"
+ locationsKey = "locations"
+ builtInResourceNamespace = "Microsoft.Resources"
+)
+
+// RootResourceID defines the tenant as the root parent of all other ResourceID.
+var RootResourceID = &ResourceID{
+ Parent: nil,
+ ResourceType: TenantResourceType,
+ Name: "",
+}
+
+// ResourceID represents a resource ID such as `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRg`.
+// Don't create this type directly, use ParseResourceID instead.
+type ResourceID struct {
+ // Parent is the parent ResourceID of this instance.
+ // Can be nil if there is no parent.
+ Parent *ResourceID
+
+ // SubscriptionID is the subscription ID in this resource ID.
+ // The value can be empty if the resource ID does not contain a subscription ID.
+ SubscriptionID string
+
+ // ResourceGroupName is the resource group name in this resource ID.
+ // The value can be empty if the resource ID does not contain a resource group name.
+ ResourceGroupName string
+
+ // Provider represents the provider name in this resource ID.
+ // This is only valid when the resource ID represents a resource provider.
+ // Example: `/subscriptions/00000000-0000-0000-0000-000000000000/providers/Microsoft.Insights`
+ Provider string
+
+ // Location is the location in this resource ID.
+ // The value can be empty if the resource ID does not contain a location name.
+ Location string
+
+ // ResourceType represents the type of this resource ID.
+ ResourceType ResourceType
+
+ // Name is the resource name of this resource ID.
+ Name string
+
+ isChild bool
+ stringValue string
+}
+
+// ParseResourceID parses a string to an instance of ResourceID
+func ParseResourceID(id string) (*ResourceID, error) {
+ if len(id) == 0 {
+ return nil, fmt.Errorf("invalid resource ID: id cannot be empty")
+ }
+
+ if !strings.HasPrefix(id, "/") {
+ return nil, fmt.Errorf("invalid resource ID: resource id '%s' must start with '/'", id)
+ }
+
+ parts := splitStringAndOmitEmpty(id, "/")
+
+ if len(parts) < 2 {
+ return nil, fmt.Errorf("invalid resource ID: %s", id)
+ }
+
+ if !strings.EqualFold(parts[0], subscriptionsKey) && !strings.EqualFold(parts[0], providersKey) {
+ return nil, fmt.Errorf("invalid resource ID: %s", id)
+ }
+
+ return appendNext(RootResourceID, parts, id)
+}
+
+// String returns the string of the ResourceID
+func (id *ResourceID) String() string {
+ if len(id.stringValue) > 0 {
+ return id.stringValue
+ }
+
+ if id.Parent == nil {
+ return ""
+ }
+
+ builder := strings.Builder{}
+ builder.WriteString(id.Parent.String())
+
+ if id.isChild {
+ builder.WriteString(fmt.Sprintf("/%s", id.ResourceType.lastType()))
+ if len(id.Name) > 0 {
+ builder.WriteString(fmt.Sprintf("/%s", id.Name))
+ }
+ } else {
+ builder.WriteString(fmt.Sprintf("/providers/%s/%s/%s", id.ResourceType.Namespace, id.ResourceType.Type, id.Name))
+ }
+
+ id.stringValue = builder.String()
+
+ return id.stringValue
+}
+
+// MarshalText returns a textual representation of the ResourceID
+func (id *ResourceID) MarshalText() ([]byte, error) {
+ return []byte(id.String()), nil
+}
+
+// UnmarshalText decodes the textual representation of a ResourceID
+func (id *ResourceID) UnmarshalText(text []byte) error {
+ newId, err := ParseResourceID(string(text))
+ if err != nil {
+ return err
+ }
+ *id = *newId
+ return nil
+}
+
+func newResourceID(parent *ResourceID, resourceTypeName string, resourceName string) *ResourceID {
+ id := &ResourceID{}
+ id.init(parent, chooseResourceType(resourceTypeName, parent), resourceName, true)
+ return id
+}
+
+func newResourceIDWithResourceType(parent *ResourceID, resourceType ResourceType, resourceName string) *ResourceID {
+ id := &ResourceID{}
+ id.init(parent, resourceType, resourceName, true)
+ return id
+}
+
+func newResourceIDWithProvider(parent *ResourceID, providerNamespace, resourceTypeName, resourceName string) *ResourceID {
+ id := &ResourceID{}
+ id.init(parent, NewResourceType(providerNamespace, resourceTypeName), resourceName, false)
+ return id
+}
+
+func chooseResourceType(resourceTypeName string, parent *ResourceID) ResourceType {
+ if strings.EqualFold(resourceTypeName, resourceGroupsLowerKey) {
+ return ResourceGroupResourceType
+ } else if strings.EqualFold(resourceTypeName, subscriptionsKey) && parent != nil && parent.ResourceType.String() == TenantResourceType.String() {
+ return SubscriptionResourceType
+ }
+
+ return parent.ResourceType.AppendChild(resourceTypeName)
+}
+
+func (id *ResourceID) init(parent *ResourceID, resourceType ResourceType, name string, isChild bool) {
+ if parent != nil {
+ id.Provider = parent.Provider
+ id.SubscriptionID = parent.SubscriptionID
+ id.ResourceGroupName = parent.ResourceGroupName
+ id.Location = parent.Location
+ }
+
+ if resourceType.String() == SubscriptionResourceType.String() {
+ id.SubscriptionID = name
+ }
+
+ if resourceType.lastType() == locationsKey {
+ id.Location = name
+ }
+
+ if resourceType.String() == ResourceGroupResourceType.String() {
+ id.ResourceGroupName = name
+ }
+
+ if resourceType.String() == ProviderResourceType.String() {
+ id.Provider = name
+ }
+
+ if parent == nil {
+ id.Parent = RootResourceID
+ } else {
+ id.Parent = parent
+ }
+ id.isChild = isChild
+ id.ResourceType = resourceType
+ id.Name = name
+}
+
+func appendNext(parent *ResourceID, parts []string, id string) (*ResourceID, error) {
+ if len(parts) == 0 {
+ return parent, nil
+ }
+
+ if len(parts) == 1 {
+ // subscriptions and resourceGroups are not valid ids without their names
+ if strings.EqualFold(parts[0], subscriptionsKey) || strings.EqualFold(parts[0], resourceGroupsLowerKey) {
+ return nil, fmt.Errorf("invalid resource ID: %s", id)
+ }
+
+ // resourceGroup must contain either child or provider resource type
+ if parent.ResourceType.String() == ResourceGroupResourceType.String() {
+ return nil, fmt.Errorf("invalid resource ID: %s", id)
+ }
+
+ return newResourceID(parent, parts[0], ""), nil
+ }
+
+ if strings.EqualFold(parts[0], providersKey) && (len(parts) == 2 || strings.EqualFold(parts[2], providersKey)) {
+ // provider resource can only be on a tenant or a subscription parent
+ if parent.ResourceType.String() != SubscriptionResourceType.String() && parent.ResourceType.String() != TenantResourceType.String() {
+ return nil, fmt.Errorf("invalid resource ID: %s", id)
+ }
+
+ return appendNext(newResourceIDWithResourceType(parent, ProviderResourceType, parts[1]), parts[2:], id)
+ }
+
+ if len(parts) > 3 && strings.EqualFold(parts[0], providersKey) {
+ return appendNext(newResourceIDWithProvider(parent, parts[1], parts[2], parts[3]), parts[4:], id)
+ }
+
+ if len(parts) > 1 && !strings.EqualFold(parts[0], providersKey) {
+ return appendNext(newResourceID(parent, parts[0], parts[1]), parts[2:], id)
+ }
+
+ return nil, fmt.Errorf("invalid resource ID: %s", id)
+}
+
+func splitStringAndOmitEmpty(v, sep string) []string {
+ r := make([]string, 0)
+ for _, s := range strings.Split(v, sep) {
+ if len(s) == 0 {
+ continue
+ }
+ r = append(r, s)
+ }
+
+ return r
+}
@@ -0,0 +1,114 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package resource
+
+import (
+ "fmt"
+ "strings"
+)
+
+// SubscriptionResourceType is the ResourceType of a subscription
+var SubscriptionResourceType = NewResourceType(builtInResourceNamespace, "subscriptions")
+
+// ResourceGroupResourceType is the ResourceType of a resource group
+var ResourceGroupResourceType = NewResourceType(builtInResourceNamespace, "resourceGroups")
+
+// TenantResourceType is the ResourceType of a tenant
+var TenantResourceType = NewResourceType(builtInResourceNamespace, "tenants")
+
+// ProviderResourceType is the ResourceType of a provider
+var ProviderResourceType = NewResourceType(builtInResourceNamespace, "providers")
+
+// ResourceType represents an Azure resource type, e.g. "Microsoft.Network/virtualNetworks/subnets".
+// Don't create this type directly, use ParseResourceType or NewResourceType instead.
+type ResourceType struct {
+ // Namespace is the namespace of the resource type.
+ // e.g. "Microsoft.Network" in resource type "Microsoft.Network/virtualNetworks/subnets"
+ Namespace string
+
+ // Type is the full type name of the resource type.
+ // e.g. "virtualNetworks/subnets" in resource type "Microsoft.Network/virtualNetworks/subnets"
+ Type string
+
+ // Types is the slice of all the sub-types of this resource type.
+ // e.g. ["virtualNetworks", "subnets"] in resource type "Microsoft.Network/virtualNetworks/subnets"
+ Types []string
+
+ stringValue string
+}
+
+// String returns the string of the ResourceType
+func (t ResourceType) String() string {
+ return t.stringValue
+}
+
+// IsParentOf returns true when the receiver is the parent resource type of the child.
+func (t ResourceType) IsParentOf(child ResourceType) bool {
+ if !strings.EqualFold(t.Namespace, child.Namespace) {
+ return false
+ }
+ if len(t.Types) >= len(child.Types) {
+ return false
+ }
+ for i := range t.Types {
+ if !strings.EqualFold(t.Types[i], child.Types[i]) {
+ return false
+ }
+ }
+
+ return true
+}
+
+// AppendChild creates an instance of ResourceType using the receiver as the parent with childType appended to it.
+func (t ResourceType) AppendChild(childType string) ResourceType {
+ return NewResourceType(t.Namespace, fmt.Sprintf("%s/%s", t.Type, childType))
+}
+
+// NewResourceType creates an instance of ResourceType using a provider namespace
+// such as "Microsoft.Network" and type such as "virtualNetworks/subnets".
+func NewResourceType(providerNamespace, typeName string) ResourceType {
+ return ResourceType{
+ Namespace: providerNamespace,
+ Type: typeName,
+ Types: splitStringAndOmitEmpty(typeName, "/"),
+ stringValue: fmt.Sprintf("%s/%s", providerNamespace, typeName),
+ }
+}
+
+// ParseResourceType parses the ResourceType from a resource type string (e.g. Microsoft.Network/virtualNetworks/subnets)
+// or a resource identifier string.
+// e.g. /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myRg/providers/Microsoft.Network/virtualNetworks/vnet/subnets/mySubnet)
+func ParseResourceType(resourceIDOrType string) (ResourceType, error) {
+ // split the path into segments
+ parts := splitStringAndOmitEmpty(resourceIDOrType, "/")
+
+ // There must be at least a namespace and type name
+ if len(parts) < 1 {
+ return ResourceType{}, fmt.Errorf("invalid resource ID or type: %s", resourceIDOrType)
+ }
+
+ // if the type is just subscriptions, it is a built-in type in the Microsoft.Resources namespace
+ if len(parts) == 1 {
+ // Simple resource type
+ return NewResourceType(builtInResourceNamespace, parts[0]), nil
+ } else if strings.Contains(parts[0], ".") {
+ // Handle resource types (Microsoft.Compute/virtualMachines, Microsoft.Network/virtualNetworks/subnets)
+ // it is a full type name
+ return NewResourceType(parts[0], strings.Join(parts[1:], "/")), nil
+ } else {
+ // Check if ResourceID
+ id, err := ParseResourceID(resourceIDOrType)
+ if err != nil {
+ return ResourceType{}, err
+ }
+ return NewResourceType(id.ResourceType.Namespace, id.ResourceType.Type), nil
+ }
+}
+
+func (t ResourceType) lastType() string {
+ return t.Types[len(t.Types)-1]
+}
@@ -0,0 +1,108 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package policy
+
+import (
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+)
+
+// BearerTokenOptions configures the bearer token policy's behavior.
+type BearerTokenOptions struct {
+ // AuxiliaryTenants are additional tenant IDs for authenticating cross-tenant requests.
+ // The policy will add a token from each of these tenants to every request. The
+ // authenticating user or service principal must be a guest in these tenants, and the
+ // policy's credential must support multitenant authentication.
+ AuxiliaryTenants []string
+
+ // InsecureAllowCredentialWithHTTP enables authenticated requests over HTTP.
+ // By default, authenticated requests to an HTTP endpoint are rejected by the client.
+ // WARNING: setting this to true will allow sending the authentication key in clear text. Use with caution.
+ InsecureAllowCredentialWithHTTP bool
+
+ // Scopes contains the list of permission scopes required for the token.
+ Scopes []string
+}
+
+// RegistrationOptions configures the registration policy's behavior.
+// All zero-value fields will be initialized with their default values.
+type RegistrationOptions struct {
+ policy.ClientOptions
+
+ // MaxAttempts is the total number of times to attempt automatic registration
+ // in the event that an attempt fails.
+ // The default value is 3.
+ // Set to a value less than zero to disable the policy.
+ MaxAttempts int
+
+ // PollingDelay is the amount of time to sleep between polling intervals.
+ // The default value is 15 seconds.
+ // A value less than zero means no delay between polling intervals (not recommended).
+ PollingDelay time.Duration
+
+ // PollingDuration is the amount of time to wait before abandoning polling.
+	// The default value is 5 minutes.
+ // NOTE: Setting this to a small value might cause the policy to prematurely fail.
+ PollingDuration time.Duration
+
+ // StatusCodes contains the slice of custom HTTP status codes to use instead
+ // of the default http.StatusConflict. This should only be set if a service
+ // returns a non-standard HTTP status code when unregistered.
+ StatusCodes []int
+}
+
+// ClientOptions contains configuration settings for a client's pipeline.
+type ClientOptions struct {
+ policy.ClientOptions
+
+ // AuxiliaryTenants are additional tenant IDs for authenticating cross-tenant requests.
+ // The client will add a token from each of these tenants to every request. The
+ // authenticating user or service principal must be a guest in these tenants, and the
+ // client's credential must support multitenant authentication.
+ AuxiliaryTenants []string
+
+ // DisableRPRegistration disables the auto-RP registration policy. Defaults to false.
+ DisableRPRegistration bool
+}
+
+// Clone returns a deep copy of the current options.
+func (o *ClientOptions) Clone() *ClientOptions {
+ if o == nil {
+ return nil
+ }
+ copiedOptions := *o
+ copiedOptions.Cloud.Services = copyMap(copiedOptions.Cloud.Services)
+ copiedOptions.Logging.AllowedHeaders = copyArray(copiedOptions.Logging.AllowedHeaders)
+ copiedOptions.Logging.AllowedQueryParams = copyArray(copiedOptions.Logging.AllowedQueryParams)
+ copiedOptions.Retry.StatusCodes = copyArray(copiedOptions.Retry.StatusCodes)
+ copiedOptions.PerRetryPolicies = copyArray(copiedOptions.PerRetryPolicies)
+ copiedOptions.PerCallPolicies = copyArray(copiedOptions.PerCallPolicies)
+ return &copiedOptions
+}
+
+// copyMap returns a new map containing all key/value pairs from the src map.
+func copyMap[K comparable, V any](src map[K]V) map[K]V {
+ if src == nil {
+ return nil
+ }
+ copiedMap := make(map[K]V)
+ for k, v := range src {
+ copiedMap[k] = v
+ }
+ return copiedMap
+}
+
+// copyArray returns a new slice containing all the elements of the src slice.
+func copyArray[T any](src []T) []T {
+ if src == nil {
+ return nil
+ }
+ copiedArray := make([]T, len(src))
+ copy(copiedArray, src)
+ return copiedArray
+}
@@ -0,0 +1,70 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "errors"
+ "reflect"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ armpolicy "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ azpolicy "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ azruntime "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+)
+
+// NewPipeline creates a pipeline from connection options. Policies from ClientOptions are
+// placed after policies from PipelineOptions. The telemetry policy, when enabled, will
+// use the specified module and version info.
+func NewPipeline(module, version string, cred azcore.TokenCredential, plOpts azruntime.PipelineOptions, options *armpolicy.ClientOptions) (azruntime.Pipeline, error) {
+ if options == nil {
+ options = &armpolicy.ClientOptions{}
+ }
+ conf, err := getConfiguration(&options.ClientOptions)
+ if err != nil {
+ return azruntime.Pipeline{}, err
+ }
+ authPolicy := NewBearerTokenPolicy(cred, &armpolicy.BearerTokenOptions{
+ AuxiliaryTenants: options.AuxiliaryTenants,
+ InsecureAllowCredentialWithHTTP: options.InsecureAllowCredentialWithHTTP,
+ Scopes: []string{conf.Audience + "/.default"},
+ })
+ // we don't want to modify the underlying array in plOpts.PerRetry
+ perRetry := make([]azpolicy.Policy, len(plOpts.PerRetry), len(plOpts.PerRetry)+1)
+ copy(perRetry, plOpts.PerRetry)
+ perRetry = append(perRetry, authPolicy, exported.PolicyFunc(httpTraceNamespacePolicy))
+ plOpts.PerRetry = perRetry
+ if !options.DisableRPRegistration {
+ regRPOpts := armpolicy.RegistrationOptions{ClientOptions: options.ClientOptions}
+ regPolicy, err := NewRPRegistrationPolicy(cred, ®RPOpts)
+ if err != nil {
+ return azruntime.Pipeline{}, err
+ }
+ // we don't want to modify the underlying array in plOpts.PerCall
+ perCall := make([]azpolicy.Policy, len(plOpts.PerCall), len(plOpts.PerCall)+1)
+ copy(perCall, plOpts.PerCall)
+ perCall = append(perCall, regPolicy)
+ plOpts.PerCall = perCall
+ }
+ if plOpts.APIVersion.Name == "" {
+ plOpts.APIVersion.Name = "api-version"
+ }
+ return azruntime.NewPipeline(module, version, plOpts, &options.ClientOptions), nil
+}
+
+func getConfiguration(o *azpolicy.ClientOptions) (cloud.ServiceConfiguration, error) {
+ c := cloud.AzurePublic
+ if !reflect.ValueOf(o.Cloud).IsZero() {
+ c = o.Cloud
+ }
+ if conf, ok := c.Services[cloud.ResourceManager]; ok && conf.Endpoint != "" && conf.Audience != "" {
+ return conf, nil
+ } else {
+ return conf, errors.New("provided Cloud field is missing Azure Resource Manager configuration")
+ }
+}
@@ -0,0 +1,102 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "strings"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ armpolicy "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ azpolicy "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ azruntime "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/temporal"
+)
+
+const headerAuxiliaryAuthorization = "x-ms-authorization-auxiliary"
+
+// acquiringResourceState holds data for an auxiliary token request
+type acquiringResourceState struct {
+ ctx context.Context
+ p *BearerTokenPolicy
+ tenant string
+}
+
+// acquireAuxToken acquires a token from an auxiliary tenant. Only one thread/goroutine at a time ever calls this function.
+func acquireAuxToken(state acquiringResourceState) (newResource azcore.AccessToken, newExpiration time.Time, err error) {
+ tk, err := state.p.cred.GetToken(state.ctx, azpolicy.TokenRequestOptions{
+ EnableCAE: true,
+ Scopes: state.p.scopes,
+ TenantID: state.tenant,
+ })
+ if err != nil {
+ return azcore.AccessToken{}, time.Time{}, err
+ }
+ return tk, tk.ExpiresOn, nil
+}
+
+// BearerTokenPolicy authorizes requests with bearer tokens acquired from a TokenCredential.
+type BearerTokenPolicy struct {
+ auxResources map[string]*temporal.Resource[azcore.AccessToken, acquiringResourceState]
+ btp *azruntime.BearerTokenPolicy
+ cred azcore.TokenCredential
+ scopes []string
+}
+
+// NewBearerTokenPolicy creates a policy object that authorizes requests with bearer tokens.
+// cred: an azcore.TokenCredential implementation such as a credential object from azidentity
+// opts: optional settings. Pass nil to accept default values; this is the same as passing a zero-value options.
+func NewBearerTokenPolicy(cred azcore.TokenCredential, opts *armpolicy.BearerTokenOptions) *BearerTokenPolicy {
+ if opts == nil {
+ opts = &armpolicy.BearerTokenOptions{}
+ }
+ p := &BearerTokenPolicy{cred: cred}
+ p.auxResources = make(map[string]*temporal.Resource[azcore.AccessToken, acquiringResourceState], len(opts.AuxiliaryTenants))
+ for _, t := range opts.AuxiliaryTenants {
+ p.auxResources[t] = temporal.NewResource(acquireAuxToken)
+ }
+ p.scopes = make([]string, len(opts.Scopes))
+ copy(p.scopes, opts.Scopes)
+ p.btp = azruntime.NewBearerTokenPolicy(cred, opts.Scopes, &azpolicy.BearerTokenOptions{
+ InsecureAllowCredentialWithHTTP: opts.InsecureAllowCredentialWithHTTP,
+ AuthorizationHandler: azpolicy.AuthorizationHandler{
+ OnRequest: p.onRequest,
+ },
+ })
+ return p
+}
+
+// onRequest authorizes requests with one or more bearer tokens
+func (b *BearerTokenPolicy) onRequest(req *azpolicy.Request, authNZ func(azpolicy.TokenRequestOptions) error) error {
+ // authorize the request with a token for the primary tenant
+ err := authNZ(azpolicy.TokenRequestOptions{Scopes: b.scopes})
+ if err != nil || len(b.auxResources) == 0 {
+ return err
+ }
+ // add tokens for auxiliary tenants
+ as := acquiringResourceState{
+ ctx: req.Raw().Context(),
+ p: b,
+ }
+ auxTokens := make([]string, 0, len(b.auxResources))
+ for tenant, er := range b.auxResources {
+ as.tenant = tenant
+ auxTk, err := er.Get(as)
+ if err != nil {
+ return err
+ }
+ auxTokens = append(auxTokens, fmt.Sprintf("%s%s", shared.BearerTokenPrefix, auxTk.Token))
+ }
+ req.Raw().Header.Set(headerAuxiliaryAuthorization, strings.Join(auxTokens, ", "))
+ return nil
+}
+
+// Do authorizes a request with a bearer token
+func (b *BearerTokenPolicy) Do(req *azpolicy.Request) (*http.Response, error) {
+ return b.btp.Do(req)
+}
@@ -0,0 +1,322 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/internal/resource"
+ armpolicy "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ azpolicy "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+)
+
+const (
+ // LogRPRegistration entries contain information specific to the automatic registration of an RP.
+ // Entries of this classification are written IFF the policy needs to take any action.
+ LogRPRegistration log.Event = "RPRegistration"
+)
+
+// setDefaults fills in zero-value fields with their default values
+func setDefaults(r *armpolicy.RegistrationOptions) {
+ if r.MaxAttempts == 0 {
+ r.MaxAttempts = 3
+ } else if r.MaxAttempts < 0 {
+ r.MaxAttempts = 0
+ }
+ if r.PollingDelay == 0 {
+ r.PollingDelay = 15 * time.Second
+ } else if r.PollingDelay < 0 {
+ r.PollingDelay = 0
+ }
+ if r.PollingDuration == 0 {
+ r.PollingDuration = 5 * time.Minute
+ }
+ if len(r.StatusCodes) == 0 {
+ r.StatusCodes = []int{http.StatusConflict}
+ }
+}
+
+// NewRPRegistrationPolicy creates a policy object configured using the specified options.
+// The policy controls whether an unregistered resource provider should automatically be
+// registered. See https://aka.ms/rps-not-found for more information.
+func NewRPRegistrationPolicy(cred azcore.TokenCredential, o *armpolicy.RegistrationOptions) (azpolicy.Policy, error) {
+ if o == nil {
+ o = &armpolicy.RegistrationOptions{}
+ }
+ conf, err := getConfiguration(&o.ClientOptions)
+ if err != nil {
+ return nil, err
+ }
+ authPolicy := NewBearerTokenPolicy(cred, &armpolicy.BearerTokenOptions{Scopes: []string{conf.Audience + "/.default"}})
+ p := &rpRegistrationPolicy{
+ endpoint: conf.Endpoint,
+ pipeline: runtime.NewPipeline(shared.Module, shared.Version, runtime.PipelineOptions{PerRetry: []azpolicy.Policy{authPolicy}}, &o.ClientOptions),
+ options: *o,
+ }
+ // init the copy
+ setDefaults(&p.options)
+ return p, nil
+}
+
+type rpRegistrationPolicy struct {
+ endpoint string
+ pipeline runtime.Pipeline
+ options armpolicy.RegistrationOptions
+}
+
+func (r *rpRegistrationPolicy) Do(req *azpolicy.Request) (*http.Response, error) {
+ if r.options.MaxAttempts == 0 {
+ // policy is disabled
+ return req.Next()
+ }
+ const registeredState = "Registered"
+ var rp string
+ var resp *http.Response
+ for attempts := 0; attempts < r.options.MaxAttempts; attempts++ {
+ var err error
+ // make the original request
+ resp, err = req.Next()
+ // getting a 409 is the first indication that the RP might need to be registered, check error response
+ if err != nil || !runtime.HasStatusCode(resp, r.options.StatusCodes...) {
+ return resp, err
+ }
+ var reqErr requestError
+ if err = runtime.UnmarshalAsJSON(resp, &reqErr); err != nil {
+ return resp, err
+ }
+ if reqErr.ServiceError == nil {
+ // missing service error info. just return the response
+ // to the caller so its error unmarshalling will kick in
+ return resp, err
+ }
+ if !isUnregisteredRPCode(reqErr.ServiceError.Code) {
+ // not a 409 due to unregistered RP. just return the response
+ // to the caller so its error unmarshalling will kick in
+ return resp, err
+ }
+ res, err := resource.ParseResourceID(req.Raw().URL.Path)
+ if err != nil {
+ return resp, err
+ }
+ rp = res.ResourceType.Namespace
+ logRegistrationExit := func(v any) {
+ log.Writef(LogRPRegistration, "END registration for %s: %v", rp, v)
+ }
+ log.Writef(LogRPRegistration, "BEGIN registration for %s", rp)
+ // create client and make the registration request
+ // we use the scheme and host from the original request
+ rpOps := &providersOperations{
+ p: r.pipeline,
+ u: r.endpoint,
+ subID: res.SubscriptionID,
+ }
+ if _, err = rpOps.Register(&shared.ContextWithDeniedValues{Context: req.Raw().Context()}, rp); err != nil {
+ logRegistrationExit(err)
+ return resp, err
+ }
+
+ // RP was registered, however we need to wait for the registration to complete
+ pollCtx, pollCancel := context.WithTimeout(&shared.ContextWithDeniedValues{Context: req.Raw().Context()}, r.options.PollingDuration)
+ var lastRegState string
+ for {
+ // get the current registration state
+ getResp, err := rpOps.Get(pollCtx, rp)
+ if err != nil {
+ pollCancel()
+ logRegistrationExit(err)
+ return resp, err
+ }
+ if getResp.Provider.RegistrationState != nil && !strings.EqualFold(*getResp.Provider.RegistrationState, lastRegState) {
+ // registration state has changed, or was updated for the first time
+ lastRegState = *getResp.Provider.RegistrationState
+ log.Writef(LogRPRegistration, "registration state is %s", lastRegState)
+ }
+ if strings.EqualFold(lastRegState, registeredState) {
+ // registration complete
+ pollCancel()
+ logRegistrationExit(lastRegState)
+ break
+ }
+ // wait before trying again
+ select {
+ case <-time.After(r.options.PollingDelay):
+ // continue polling
+ case <-pollCtx.Done():
+ pollCancel()
+ logRegistrationExit(pollCtx.Err())
+ return resp, pollCtx.Err()
+ }
+ }
+ // RP was successfully registered, retry the original request
+ err = req.RewindBody()
+ if err != nil {
+ return resp, err
+ }
+ }
+ // if we get here it means we exceeded the number of attempts
+ return resp, fmt.Errorf("exceeded attempts to register %s", rp)
+}
+
+var unregisteredRPCodes = []string{
+ "MissingSubscriptionRegistration",
+ "MissingRegistrationForResourceProvider",
+ "Subscription Not Registered",
+ "SubscriptionNotRegistered",
+}
+
+func isUnregisteredRPCode(errorCode string) bool {
+ for _, code := range unregisteredRPCodes {
+ if strings.EqualFold(errorCode, code) {
+ return true
+ }
+ }
+ return false
+}
+
+// minimal error definitions to simplify detection
+type requestError struct {
+ ServiceError *serviceError `json:"error"`
+}
+
+type serviceError struct {
+ Code string `json:"code"`
+}
+
+///////////////////////////////////////////////////////////////////////////////////////////////
+// the following code was copied from module armresources, providers.go and models.go
+// only the minimum amount of code was copied to get this working and some edits were made.
+///////////////////////////////////////////////////////////////////////////////////////////////
+
+type providersOperations struct {
+ p runtime.Pipeline
+ u string
+ subID string
+}
+
+// Get - Gets the specified resource provider.
+func (client *providersOperations) Get(ctx context.Context, resourceProviderNamespace string) (providerResponse, error) {
+ req, err := client.getCreateRequest(ctx, resourceProviderNamespace)
+ if err != nil {
+ return providerResponse{}, err
+ }
+ resp, err := client.p.Do(req)
+ if err != nil {
+ return providerResponse{}, err
+ }
+ result, err := client.getHandleResponse(resp)
+ if err != nil {
+ return providerResponse{}, err
+ }
+ return result, nil
+}
+
+// getCreateRequest creates the Get request.
+func (client *providersOperations) getCreateRequest(ctx context.Context, resourceProviderNamespace string) (*azpolicy.Request, error) {
+ urlPath := "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}"
+ urlPath = strings.ReplaceAll(urlPath, "{resourceProviderNamespace}", url.PathEscape(resourceProviderNamespace))
+ urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subID))
+ req, err := runtime.NewRequest(ctx, http.MethodGet, runtime.JoinPaths(client.u, urlPath))
+ if err != nil {
+ return nil, err
+ }
+ query := req.Raw().URL.Query()
+ query.Set("api-version", "2019-05-01")
+ req.Raw().URL.RawQuery = query.Encode()
+ return req, nil
+}
+
+// getHandleResponse handles the Get response.
+func (client *providersOperations) getHandleResponse(resp *http.Response) (providerResponse, error) {
+ if !runtime.HasStatusCode(resp, http.StatusOK) {
+ return providerResponse{}, exported.NewResponseError(resp)
+ }
+ result := providerResponse{RawResponse: resp}
+ err := runtime.UnmarshalAsJSON(resp, &result.Provider)
+ if err != nil {
+ return providerResponse{}, err
+ }
+ return result, err
+}
+
+// Register - Registers a subscription with a resource provider.
+func (client *providersOperations) Register(ctx context.Context, resourceProviderNamespace string) (providerResponse, error) {
+ req, err := client.registerCreateRequest(ctx, resourceProviderNamespace)
+ if err != nil {
+ return providerResponse{}, err
+ }
+ resp, err := client.p.Do(req)
+ if err != nil {
+ return providerResponse{}, err
+ }
+ result, err := client.registerHandleResponse(resp)
+ if err != nil {
+ return providerResponse{}, err
+ }
+ return result, nil
+}
+
+// registerCreateRequest creates the Register request.
+func (client *providersOperations) registerCreateRequest(ctx context.Context, resourceProviderNamespace string) (*azpolicy.Request, error) {
+ urlPath := "/subscriptions/{subscriptionId}/providers/{resourceProviderNamespace}/register"
+ urlPath = strings.ReplaceAll(urlPath, "{resourceProviderNamespace}", url.PathEscape(resourceProviderNamespace))
+ urlPath = strings.ReplaceAll(urlPath, "{subscriptionId}", url.PathEscape(client.subID))
+ req, err := runtime.NewRequest(ctx, http.MethodPost, runtime.JoinPaths(client.u, urlPath))
+ if err != nil {
+ return nil, err
+ }
+ query := req.Raw().URL.Query()
+ query.Set("api-version", "2019-05-01")
+ req.Raw().URL.RawQuery = query.Encode()
+ return req, nil
+}
+
+// registerHandleResponse handles the Register response.
+func (client *providersOperations) registerHandleResponse(resp *http.Response) (providerResponse, error) {
+ if !runtime.HasStatusCode(resp, http.StatusOK) {
+ return providerResponse{}, exported.NewResponseError(resp)
+ }
+ result := providerResponse{RawResponse: resp}
+ err := runtime.UnmarshalAsJSON(resp, &result.Provider)
+ if err != nil {
+ return providerResponse{}, err
+ }
+ return result, err
+}
+
+// providerResponse is the response envelope for operations that return a Provider type.
+type providerResponse struct {
+ // Resource provider information.
+ Provider *provider
+
+ // RawResponse contains the underlying HTTP response.
+ RawResponse *http.Response
+}
+
+// Provider - Resource provider information.
+type provider struct {
+ // The provider ID.
+ ID *string `json:"id,omitempty"`
+
+ // The namespace of the resource provider.
+ Namespace *string `json:"namespace,omitempty"`
+
+ // The registration policy of the resource provider.
+ RegistrationPolicy *string `json:"registrationPolicy,omitempty"`
+
+ // The registration state of the resource provider.
+ RegistrationState *string `json:"registrationState,omitempty"`
+}
@@ -0,0 +1,30 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/internal/resource"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/tracing"
+)
+
+// httpTraceNamespacePolicy is a policy that adds the az.namespace attribute to the current Span
+func httpTraceNamespacePolicy(req *policy.Request) (resp *http.Response, err error) {
+ rawTracer := req.Raw().Context().Value(shared.CtxWithTracingTracer{})
+ if tracer, ok := rawTracer.(tracing.Tracer); ok && tracer.Enabled() {
+ rt, err := resource.ParseResourceType(req.Raw().URL.Path)
+ if err == nil {
+ // add the namespace attribute to the current span
+ span := tracer.SpanFromContext(req.Raw().Context())
+ span.SetAttributes(tracing.Attribute{Key: shared.TracingNamespaceAttrName, Value: rt.Namespace})
+ }
+ }
+ return req.Next()
+}
@@ -0,0 +1,24 @@
+//go:build go1.16
+// +build go1.16
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import "github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud"
+
+func init() {
+ cloud.AzureChina.Services[cloud.ResourceManager] = cloud.ServiceConfiguration{
+ Audience: "https://management.core.chinacloudapi.cn",
+ Endpoint: "https://management.chinacloudapi.cn",
+ }
+ cloud.AzureGovernment.Services[cloud.ResourceManager] = cloud.ServiceConfiguration{
+ Audience: "https://management.core.usgovcloudapi.net",
+ Endpoint: "https://management.usgovcloudapi.net",
+ }
+ cloud.AzurePublic.Services[cloud.ResourceManager] = cloud.ServiceConfiguration{
+ Audience: "https://management.core.windows.net/",
+ Endpoint: "https://management.azure.com",
+ }
+}
@@ -0,0 +1,29 @@
+# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file.
+trigger:
+ branches:
+ include:
+ - main
+ - feature/*
+ - hotfix/*
+ - release/*
+ paths:
+ include:
+ - sdk/azcore/
+ - eng/
+
+pr:
+ branches:
+ include:
+ - main
+ - feature/*
+ - hotfix/*
+ - release/*
+ paths:
+ include:
+ - sdk/azcore/
+ - eng/
+
+extends:
+ template: /eng/pipelines/templates/jobs/archetype-sdk-client.yml
+ parameters:
+ ServiceDirectory: azcore
@@ -0,0 +1,44 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package cloud
+
// ServiceName identifies a cloud service.
type ServiceName string

// ResourceManager is a global constant identifying Azure Resource Manager.
const ResourceManager ServiceName = "resourceManager"

// ServiceConfiguration configures a specific cloud service such as Azure Resource Manager.
type ServiceConfiguration struct {
	// Audience is the audience the client will request for its access tokens.
	Audience string
	// Endpoint is the service's base URL.
	Endpoint string
}

// Configuration configures a cloud.
type Configuration struct {
	// ActiveDirectoryAuthorityHost is the base URL of the cloud's Azure Active Directory.
	ActiveDirectoryAuthorityHost string
	// Services contains configuration for the cloud's services.
	Services map[ServiceName]ServiceConfiguration
}

var (
	// AzureChina contains configuration for Azure China.
	AzureChina = Configuration{
		ActiveDirectoryAuthorityHost: "https://login.chinacloudapi.cn/",
		Services:                     map[ServiceName]ServiceConfiguration{},
	}
	// AzureGovernment contains configuration for Azure Government.
	AzureGovernment = Configuration{
		ActiveDirectoryAuthorityHost: "https://login.microsoftonline.us/",
		Services:                     map[ServiceName]ServiceConfiguration{},
	}
	// AzurePublic contains configuration for Azure Public Cloud.
	AzurePublic = Configuration{
		ActiveDirectoryAuthorityHost: "https://login.microsoftonline.com/",
		Services:                     map[ServiceName]ServiceConfiguration{},
	}
)
@@ -0,0 +1,53 @@
+//go:build go1.16
+// +build go1.16
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+/*
+Package cloud implements a configuration API for applications deployed to sovereign or private Azure clouds.
+
+Azure SDK client configuration defaults are appropriate for Azure Public Cloud (sometimes referred to as
+"Azure Commercial" or simply "Microsoft Azure"). This package enables applications deployed to other
+Azure Clouds to configure clients appropriately.
+
+This package contains predefined configuration for well-known sovereign clouds such as Azure Government and
+Azure China. Azure SDK clients accept this configuration via the Cloud field of azcore.ClientOptions. For
+example, configuring a credential and ARM client for Azure Government:
+
+ opts := azcore.ClientOptions{Cloud: cloud.AzureGovernment}
+ cred, err := azidentity.NewDefaultAzureCredential(
+ &azidentity.DefaultAzureCredentialOptions{ClientOptions: opts},
+ )
+ handle(err)
+
+ client, err := armsubscription.NewClient(
+ cred, &arm.ClientOptions{ClientOptions: opts},
+ )
+ handle(err)
+
+Applications deployed to a private cloud such as Azure Stack create a Configuration object with
+appropriate values:
+
+ c := cloud.Configuration{
+ ActiveDirectoryAuthorityHost: "https://...",
+ Services: map[cloud.ServiceName]cloud.ServiceConfiguration{
+ cloud.ResourceManager: {
+ Audience: "...",
+ Endpoint: "https://...",
+ },
+ },
+ }
+ opts := azcore.ClientOptions{Cloud: c}
+
+ cred, err := azidentity.NewDefaultAzureCredential(
+ &azidentity.DefaultAzureCredentialOptions{ClientOptions: opts},
+ )
+ handle(err)
+
+ client, err := armsubscription.NewClient(
+ cred, &arm.ClientOptions{ClientOptions: opts},
+ )
+ handle(err)
+*/
+package cloud
@@ -0,0 +1,173 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azcore
+
+import (
+ "reflect"
+ "sync"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/tracing"
+)
+
// AccessToken represents an Azure service bearer access token with expiry information.
// It is an alias of the internal exported.AccessToken so both packages share one definition.
type AccessToken = exported.AccessToken

// TokenCredential represents a credential capable of providing an OAuth token.
type TokenCredential = exported.TokenCredential

// KeyCredential contains an authentication key used to authenticate to an Azure service.
type KeyCredential = exported.KeyCredential

// NewKeyCredential creates a new instance of [KeyCredential] with the specified values.
//   - key is the authentication key
func NewKeyCredential(key string) *KeyCredential {
	// thin wrapper; the internal type owns key storage
	return exported.NewKeyCredential(key)
}

// SASCredential contains a shared access signature used to authenticate to an Azure service.
type SASCredential = exported.SASCredential

// NewSASCredential creates a new instance of [SASCredential] with the specified values.
//   - sas is the shared access signature
func NewSASCredential(sas string) *SASCredential {
	// thin wrapper; the internal type owns signature storage
	return exported.NewSASCredential(sas)
}
+
// nullables holds the per-type sentinel values used to send explicit JSON
// nulls (see NullValue/IsNullValue). The redundant explicit type on the var
// declaration has been dropped per Go idiom; the literal fixes the type.
var nullables = map[reflect.Type]any{}

// nullablesMu guards all reads and writes of nullables.
var nullablesMu sync.RWMutex
+
// NullValue is used to send an explicit 'null' within a request.
// This is typically used in JSON-MERGE-PATCH operations to delete a value.
//
// The returned value is a per-type sentinel; IsNullValue recognizes it by
// address. Intended for map, slice, and pointer instantiations of T —
// reflect.New(t.Elem()) in the fallback branch would panic for other kinds,
// so callers presumably only use those — TODO confirm.
func NullValue[T any]() T {
	t := shared.TypeOfT[T]()

	// fast path: most calls find an existing sentinel under the read lock
	nullablesMu.RLock()
	v, found := nullables[t]
	nullablesMu.RUnlock()

	if found {
		// return the sentinel object
		return v.(T)
	}

	// promote to exclusive lock and check again (double-checked locking pattern)
	nullablesMu.Lock()
	defer nullablesMu.Unlock()
	v, found = nullables[t]

	if !found {
		var o reflect.Value
		if k := t.Kind(); k == reflect.Map {
			o = reflect.MakeMap(t)
		} else if k == reflect.Slice {
			// empty slices appear to all point to the same data block
			// which causes comparisons to become ambiguous. so we create
			// a slice with len/cap of one which ensures a unique address.
			o = reflect.MakeSlice(t, 1, 1)
		} else {
			o = reflect.New(t.Elem())
		}
		v = o.Interface()
		nullables[t] = v
	}
	// return the sentinel object
	return v.(T)
}
+
+// IsNullValue returns true if the field contains a null sentinel value.
+// This is used by custom marshallers to properly encode a null value.
+func IsNullValue[T any](v T) bool {
+ // see if our map has a sentinel object for this *T
+ t := reflect.TypeOf(v)
+ nullablesMu.RLock()
+ defer nullablesMu.RUnlock()
+
+ if o, found := nullables[t]; found {
+ o1 := reflect.ValueOf(o)
+ v1 := reflect.ValueOf(v)
+ // we found it; return true if v points to the sentinel object.
+ // NOTE: maps and slices can only be compared to nil, else you get
+ // a runtime panic. so we compare addresses instead.
+ return o1.Pointer() == v1.Pointer()
+ }
+ // no sentinel object for this *t
+ return false
+}
+
// ClientOptions contains optional settings for a client's pipeline.
// Instances can be shared across calls to SDK client constructors when uniform configuration is desired.
// Zero-value fields will have their specified default values applied during use.
type ClientOptions = policy.ClientOptions

// Client is a basic HTTP client. It consists of a pipeline and tracing provider.
type Client struct {
	// pl sends every request issued through this client.
	pl runtime.Pipeline
	// tr creates spans for this client's operations.
	tr tracing.Tracer

	// cached on the client to support shallow copying with new values
	// (see WithClientName, which rebuilds only the tracer).
	tp tracing.Provider
	modVer string
	namespace string
}
+
+// NewClient creates a new Client instance with the provided values.
+// - moduleName - the fully qualified name of the module where the client is defined; used by the telemetry policy and tracing provider.
+// - moduleVersion - the semantic version of the module; used by the telemetry policy and tracing provider.
+// - plOpts - pipeline configuration options; can be the zero-value
+// - options - optional client configurations; pass nil to accept the default values
+func NewClient(moduleName, moduleVersion string, plOpts runtime.PipelineOptions, options *ClientOptions) (*Client, error) {
+ if options == nil {
+ options = &ClientOptions{}
+ }
+
+ if !options.Telemetry.Disabled {
+ if err := shared.ValidateModVer(moduleVersion); err != nil {
+ return nil, err
+ }
+ }
+
+ pl := runtime.NewPipeline(moduleName, moduleVersion, plOpts, options)
+
+ tr := options.TracingProvider.NewTracer(moduleName, moduleVersion)
+ if tr.Enabled() && plOpts.Tracing.Namespace != "" {
+ tr.SetAttributes(tracing.Attribute{Key: shared.TracingNamespaceAttrName, Value: plOpts.Tracing.Namespace})
+ }
+
+ return &Client{
+ pl: pl,
+ tr: tr,
+ tp: options.TracingProvider,
+ modVer: moduleVersion,
+ namespace: plOpts.Tracing.Namespace,
+ }, nil
+}
+
// Pipeline returns the pipeline for this client.
// NOTE(review): no setter exists in this file, so the pipeline appears fixed
// at construction time — confirm before relying on that.
func (c *Client) Pipeline() runtime.Pipeline {
	return c.pl
}

// Tracer returns the tracer for this client.
// Use WithClientName to obtain a copy of the client with a different tracer.
func (c *Client) Tracer() tracing.Tracer {
	return c.tr
}
+
+// WithClientName returns a shallow copy of the Client with its tracing client name changed to clientName.
+// Note that the values for module name and version will be preserved from the source Client.
+// - clientName - the fully qualified name of the client ("package.Client"); this is used by the tracing provider when creating spans
+func (c *Client) WithClientName(clientName string) *Client {
+ tr := c.tp.NewTracer(clientName, c.modVer)
+ if tr.Enabled() && c.namespace != "" {
+ tr.SetAttributes(tracing.Attribute{Key: shared.TracingNamespaceAttrName, Value: c.namespace})
+ }
+ return &Client{pl: c.pl, tr: tr, tp: c.tp, modVer: c.modVer, namespace: c.namespace}
+}
@@ -0,0 +1,264 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright 2017 Microsoft Corporation. All rights reserved.
+// Use of this source code is governed by an MIT
+// license that can be found in the LICENSE file.
+
+/*
+Package azcore implements an HTTP request/response middleware pipeline used by Azure SDK clients.
+
+The middleware consists of three components.
+
+ - One or more Policy instances.
+ - A Transporter instance.
+ - A Pipeline instance that combines the Policy and Transporter instances.
+
+# Implementing the Policy Interface
+
+A Policy can be implemented in two ways; as a first-class function for a stateless Policy, or as
+a method on a type for a stateful Policy. Note that HTTP requests made via the same pipeline share
+the same Policy instances, so if a Policy mutates its state it MUST be properly synchronized to
+avoid race conditions.
+
+A Policy's Do method is called when an HTTP request wants to be sent over the network. The Do method can
+perform any operation(s) it desires. For example, it can log the outgoing request, mutate the URL, headers,
+and/or query parameters, inject a failure, etc. Once the Policy has successfully completed its request
+work, it must call the Next() method on the *policy.Request instance in order to pass the request to the
+next Policy in the chain.
+
+When an HTTP response comes back, the Policy then gets a chance to process the response/error. The Policy instance
+can log the response, retry the operation if it failed due to a transient error or timeout, unmarshal the response
+body, etc. Once the Policy has successfully completed its response work, it must return the *http.Response
+and error instances to its caller.
+
+Template for implementing a stateless Policy:
+
+ type policyFunc func(*policy.Request) (*http.Response, error)
+
+ // Do implements the Policy interface on policyFunc.
+ func (pf policyFunc) Do(req *policy.Request) (*http.Response, error) {
+ return pf(req)
+ }
+
+ func NewMyStatelessPolicy() policy.Policy {
+ return policyFunc(func(req *policy.Request) (*http.Response, error) {
+ // TODO: mutate/process Request here
+
+ // forward Request to next Policy & get Response/error
+ resp, err := req.Next()
+
+ // TODO: mutate/process Response/error here
+
+ // return Response/error to previous Policy
+ return resp, err
+ })
+ }
+
+Template for implementing a stateful Policy:
+
+ type MyStatefulPolicy struct {
+ // TODO: add configuration/setting fields here
+ }
+
+ // TODO: add initialization args to NewMyStatefulPolicy()
+ func NewMyStatefulPolicy() policy.Policy {
+ return &MyStatefulPolicy{
+ // TODO: initialize configuration/setting fields here
+ }
+ }
+
+ func (p *MyStatefulPolicy) Do(req *policy.Request) (resp *http.Response, err error) {
+ // TODO: mutate/process Request here
+
+ // forward Request to next Policy & get Response/error
+ resp, err := req.Next()
+
+ // TODO: mutate/process Response/error here
+
+ // return Response/error to previous Policy
+ return resp, err
+ }
+
+# Implementing the Transporter Interface
+
+The Transporter interface is responsible for sending the HTTP request and returning the corresponding
+HTTP response or error. The Transporter is invoked by the last Policy in the chain. The default Transporter
+implementation uses a shared http.Client from the standard library.
+
+The same stateful/stateless rules for Policy implementations apply to Transporter implementations.
+
+# Using Policy and Transporter Instances Via a Pipeline
+
+To use the Policy and Transporter instances, an application passes them to the runtime.NewPipeline function.
+
+ func NewPipeline(transport Transporter, policies ...Policy) Pipeline
+
+The specified Policy instances form a chain and are invoked in the order provided to NewPipeline
+followed by the Transporter.
+
+Once the Pipeline has been created, create a runtime.Request instance and pass it to Pipeline's Do method.
+
+ func NewRequest(ctx context.Context, httpMethod string, endpoint string) (*Request, error)
+
+ func (p Pipeline) Do(req *Request) (*http.Request, error)
+
+The Pipeline.Do method sends the specified Request through the chain of Policy and Transporter
+instances. The response/error is then sent through the same chain of Policy instances in reverse
+order. For example, assuming there are Policy types PolicyA, PolicyB, and PolicyC along with
+TransportA.
+
+ pipeline := NewPipeline(TransportA, PolicyA, PolicyB, PolicyC)
+
+The flow of Request and Response looks like the following:
+
+ policy.Request -> PolicyA -> PolicyB -> PolicyC -> TransportA -----+
+ |
+ HTTP(S) endpoint
+ |
+ caller <--------- PolicyA <- PolicyB <- PolicyC <- http.Response-+
+
+# Creating a Request Instance
+
+The Request instance passed to Pipeline's Do method is a wrapper around an *http.Request. It also
+contains some internal state and provides various convenience methods. You create a Request instance
+by calling the runtime.NewRequest function:
+
+ func NewRequest(ctx context.Context, httpMethod string, endpoint string) (*Request, error)
+
+If the Request should contain a body, call the SetBody method.
+
+ func (req *Request) SetBody(body ReadSeekCloser, contentType string) error
+
+A seekable stream is required so that upon retry, the retry Policy instance can seek the stream
+back to the beginning before retrying the network request and re-uploading the body.
+
+# Sending an Explicit Null
+
+Operations like JSON-MERGE-PATCH send a JSON null to indicate a value should be deleted.
+
+ {
+ "delete-me": null
+ }
+
+This requirement conflicts with the SDK's default marshalling that specifies "omitempty" as
+a means to resolve the ambiguity between a field to be excluded and its zero-value.
+
+ type Widget struct {
+ Name *string `json:",omitempty"`
+ Count *int `json:",omitempty"`
+ }
+
+In the above example, Name and Count are defined as pointer-to-type to disambiguate between
+a missing value (nil) and a zero-value (0) which might have semantic differences.
+
+In a PATCH operation, any fields left as nil are to have their values preserved. When updating
+a Widget's count, one simply specifies the new value for Count, leaving Name nil.
+
+To fulfill the requirement for sending a JSON null, the NullValue() function can be used.
+
+ w := Widget{
+ Count: azcore.NullValue[*int](),
+ }
+
+This sends an explicit "null" for Count, indicating that any current value for Count should be deleted.
+
+# Processing the Response
+
+When the HTTP response is received, the *http.Response is returned directly. Each Policy instance
+can inspect/mutate the *http.Response.
+
+# Built-in Logging
+
+To enable logging, set environment variable AZURE_SDK_GO_LOGGING to "all" before executing your program.
+
+By default the logger writes to stderr. This can be customized by calling log.SetListener, providing
+a callback that writes to the desired location. Any custom logging implementation MUST provide its
+own synchronization to handle concurrent invocations.
+
+See the docs for the log package for further details.
+
+# Pageable Operations
+
+Pageable operations return potentially large data sets spread over multiple GET requests. The result of
+each GET is a "page" of data consisting of a slice of items.
+
+Pageable operations can be identified by their New*Pager naming convention and return type of *runtime.Pager[T].
+
+ func (c *WidgetClient) NewListWidgetsPager(o *Options) *runtime.Pager[PageResponse]
+
+The call to WidgetClient.NewListWidgetsPager() returns an instance of *runtime.Pager[T] for fetching pages
+and determining if there are more pages to fetch. No IO calls are made until the NextPage() method is invoked.
+
+ pager := widgetClient.NewListWidgetsPager(nil)
+ for pager.More() {
+ page, err := pager.NextPage(context.TODO())
+ // handle err
+ for _, widget := range page.Values {
+ // process widget
+ }
+ }
+
+# Long-Running Operations
+
+Long-running operations (LROs) are operations consisting of an initial request to start the operation followed
+by polling to determine when the operation has reached a terminal state. An LRO's terminal state is one
+of the following values.
+
+ - Succeeded - the LRO completed successfully
+ - Failed - the LRO failed to complete
+ - Canceled - the LRO was canceled
+
+LROs can be identified by their Begin* prefix and their return type of *runtime.Poller[T].
+
+ func (c *WidgetClient) BeginCreateOrUpdate(ctx context.Context, w Widget, o *Options) (*runtime.Poller[Response], error)
+
+When a call to WidgetClient.BeginCreateOrUpdate() returns a nil error, it means that the LRO has started.
+It does _not_ mean that the widget has been created or updated (or failed to be created/updated).
+
+The *runtime.Poller[T] provides APIs for determining the state of the LRO. To wait for the LRO to complete,
+call the PollUntilDone() method.
+
+ poller, err := widgetClient.BeginCreateOrUpdate(context.TODO(), Widget{}, nil)
+ // handle err
+ result, err := poller.PollUntilDone(context.TODO(), nil)
+ // handle err
+ // use result
+
+The call to PollUntilDone() will block the current goroutine until the LRO has reached a terminal state or the
+context is canceled/timed out.
+
+Note that LROs can take anywhere from several seconds to several minutes. The duration is operation-dependent. Due to
+this variant behavior, pollers do _not_ have a preconfigured time-out. Use a context with the appropriate cancellation
+mechanism as required.
+
+# Resume Tokens
+
+Pollers provide the ability to serialize their state into a "resume token" which can be used by another process to
+recreate the poller. This is achieved via the runtime.Poller[T].ResumeToken() method.
+
+ token, err := poller.ResumeToken()
+ // handle error
+
+Note that a token can only be obtained for a poller that's in a non-terminal state. Also note that any subsequent calls
+to poller.Poll() might change the poller's state. In this case, a new token should be created.
+
+After the token has been obtained, it can be used to recreate an instance of the originating poller.
+
+ poller, err := widgetClient.BeginCreateOrUpdate(nil, Widget{}, &Options{
+ ResumeToken: token,
+ })
+
+When resuming a poller, no IO is performed, and zero-value arguments can be used for everything but the Options.ResumeToken.
+
+Resume tokens are unique per service client and operation. Attempting to resume a poller for LRO BeginB() with a token from LRO
+BeginA() will result in an error.
+
+# Fakes
+
+The fake package contains types used for constructing in-memory fake servers used in unit tests.
+This allows writing tests to cover various success/error conditions without the need for connecting to a live service.
+
+Please see https://github.com/Azure/azure-sdk-for-go/tree/main/sdk/samples/fakes for details and examples on how to use fakes.
+*/
+package azcore
@@ -0,0 +1,17 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azcore
+
+import "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+
// ResponseError is returned when a request is made to a service and
// the service returns a non-success HTTP status code.
// Use errors.As() to access this type in the error chain.
//
// When marshaling instances, the RawResponse field will be omitted.
// However, the contents returned by Error() will be preserved.
//
// It is an alias of the internal exported.ResponseError type so the same
// definition can be shared with internal packages.
type ResponseError = exported.ResponseError
@@ -0,0 +1,57 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azcore
+
+import (
+ "strings"
+)
+
// ETag is a property used for optimistic concurrency during updates
// ETag is a validator based on https://tools.ietf.org/html/rfc7232#section-2.3.2
// An ETag can be empty ("").
type ETag string

// ETagAny is an ETag that represents everything, the value is "*"
const ETagAny ETag = "*"

// Equals does a strong comparison of two ETags. Equals returns true when both
// ETags are not weak and the values of the underlying strings are equal.
func (e ETag) Equals(other ETag) bool {
	return !e.IsWeak() && !other.IsWeak() && e == other
}

// WeakEquals does a weak comparison of two ETags. Two ETags are equivalent if their opaque-tags match
// character-by-character, regardless of either or both being tagged as "weak".
func (e ETag) WeakEquals(other ETag) bool {
	// opaque strips the leading W/ marker when present, leaving the quoted opaque-tag
	opaque := func(t ETag) ETag {
		if t.IsWeak() {
			return t[2:]
		}
		return t
	}
	return opaque(e) == opaque(other)
}

// IsWeak specifies whether the ETag is strong or weak.
func (e ETag) IsWeak() bool {
	// a weak tag looks like W/"..." — at minimum W/"" (four characters)
	s := string(e)
	return len(s) >= 4 && strings.HasPrefix(s, `W/"`) && strings.HasSuffix(s, `"`)
}

// MatchConditions specifies HTTP options for conditional requests.
type MatchConditions struct {
	// Optionally limit requests to resources that have a matching ETag.
	IfMatch *ETag

	// Optionally limit requests to resources that do not match the ETag.
	IfNoneMatch *ETag
}
@@ -0,0 +1,175 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package exported
+
+import (
+ "context"
+ "encoding/base64"
+ "fmt"
+ "io"
+ "net/http"
+ "sync/atomic"
+ "time"
+)
+
+type nopCloser struct {
+ io.ReadSeeker
+}
+
+func (n nopCloser) Close() error {
+ return nil
+}
+
+// NopCloser returns a ReadSeekCloser with a no-op close method wrapping the provided io.ReadSeeker.
+// Exported as streaming.NopCloser().
+func NopCloser(rs io.ReadSeeker) io.ReadSeekCloser {
+ return nopCloser{rs}
+}
+
+// HasStatusCode returns true if the Response's status code is one of the specified values.
+// Exported as runtime.HasStatusCode().
+func HasStatusCode(resp *http.Response, statusCodes ...int) bool {
+ if resp == nil {
+ return false
+ }
+ for _, sc := range statusCodes {
+ if resp.StatusCode == sc {
+ return true
+ }
+ }
+ return false
+}
+
// AccessToken represents an Azure service bearer access token with expiry information.
// Exported as azcore.AccessToken.
type AccessToken struct {
	// Token is the opaque bearer token value.
	Token string
	// ExpiresOn is the time at which the token expires.
	ExpiresOn time.Time
}

// TokenRequestOptions contain specific parameter that may be used by credentials types when attempting to get a token.
// Exported as policy.TokenRequestOptions.
type TokenRequestOptions struct {
	// Claims are any additional claims required for the token to satisfy a conditional access policy, such as a
	// service may return in a claims challenge following an authorization failure. If a service returned the
	// claims value base64 encoded, it must be decoded before setting this field.
	Claims string

	// EnableCAE indicates whether to enable Continuous Access Evaluation (CAE) for the requested token. When true,
	// azidentity credentials request CAE tokens for resource APIs supporting CAE. Clients are responsible for
	// handling CAE challenges. If a client that doesn't handle CAE challenges receives a CAE token, it may end up
	// in a loop retrying an API call with a token that has been revoked due to CAE.
	EnableCAE bool

	// Scopes contains the list of permission scopes required for the token.
	Scopes []string

	// TenantID identifies the tenant from which to request the token. azidentity credentials authenticate in
	// their configured default tenants when this field isn't set.
	TenantID string
}

// TokenCredential represents a credential capable of providing an OAuth token.
// Exported as azcore.TokenCredential.
type TokenCredential interface {
	// GetToken requests an access token for the specified set of scopes.
	GetToken(ctx context.Context, options TokenRequestOptions) (AccessToken, error)
}
+
+// DecodeByteArray will base-64 decode the provided string into v.
+// Exported as runtime.DecodeByteArray()
+func DecodeByteArray(s string, v *[]byte, format Base64Encoding) error {
+ if len(s) == 0 {
+ return nil
+ }
+ payload := string(s)
+ if payload[0] == '"' {
+ // remove surrounding quotes
+ payload = payload[1 : len(payload)-1]
+ }
+ switch format {
+ case Base64StdFormat:
+ decoded, err := base64.StdEncoding.DecodeString(payload)
+ if err == nil {
+ *v = decoded
+ return nil
+ }
+ return err
+ case Base64URLFormat:
+ // use raw encoding as URL format should not contain any '=' characters
+ decoded, err := base64.RawURLEncoding.DecodeString(payload)
+ if err == nil {
+ *v = decoded
+ return nil
+ }
+ return err
+ default:
+ return fmt.Errorf("unrecognized byte array format: %d", format)
+ }
+}
+
// KeyCredential contains an authentication key used to authenticate to an Azure service.
// Exported as azcore.KeyCredential.
type KeyCredential struct {
	// cred stores the key atomically (see keyCredential).
	cred *keyCredential
}

// NewKeyCredential creates a new instance of [KeyCredential] with the specified values.
//   - key is the authentication key
func NewKeyCredential(key string) *KeyCredential {
	return &KeyCredential{cred: newKeyCredential(key)}
}

// Update replaces the existing key with the specified value.
// Safe for concurrent use: storage is an atomic.Value.
func (k *KeyCredential) Update(key string) {
	k.cred.Update(key)
}

// SASCredential contains a shared access signature used to authenticate to an Azure service.
// Exported as azcore.SASCredential.
type SASCredential struct {
	// cred stores the signature atomically (see keyCredential).
	cred *keyCredential
}

// NewSASCredential creates a new instance of [SASCredential] with the specified values.
//   - sas is the shared access signature
func NewSASCredential(sas string) *SASCredential {
	return &SASCredential{cred: newKeyCredential(sas)}
}

// Update replaces the existing shared access signature with the specified value.
// Safe for concurrent use: storage is an atomic.Value.
func (k *SASCredential) Update(sas string) {
	k.cred.Update(sas)
}

// KeyCredentialGet returns the key for cred.
// Accessor kept internal so the key isn't exposed on the public type.
func KeyCredentialGet(cred *KeyCredential) string {
	return cred.cred.Get()
}

// SASCredentialGet returns the shared access sig for cred.
// Accessor kept internal so the signature isn't exposed on the public type.
func SASCredentialGet(cred *SASCredential) string {
	return cred.cred.Get()
}
+
// keyCredential is the goroutine-safe storage shared by KeyCredential and
// SASCredential.
type keyCredential struct {
	key atomic.Value // string
}

// newKeyCredential returns a keyCredential initialized with key.
func newKeyCredential(key string) *keyCredential {
	kc := &keyCredential{}
	kc.key.Store(key)
	return kc
}

// Get returns the currently stored key.
func (k *keyCredential) Get() string {
	return k.key.Load().(string)
}

// Update atomically replaces the stored key.
func (k *keyCredential) Update(key string) {
	k.key.Store(key)
}
@@ -0,0 +1,77 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package exported
+
+import (
+ "errors"
+ "net/http"
+)
+
// Policy represents an extensibility point for the Pipeline that can mutate the specified
// Request and react to the received Response.
// Exported as policy.Policy.
type Policy interface {
	// Do applies the policy to the specified Request. When implementing a Policy, mutate the
	// request before calling req.Next() to move on to the next policy, and respond to the result
	// before returning to the caller.
	Do(req *Request) (*http.Response, error)
}

// Pipeline represents a primitive for sending HTTP requests and receiving responses.
// Its behavior can be extended by specifying policies during construction.
// Exported as runtime.Pipeline.
type Pipeline struct {
	// policies is the ordered chain applied to every request;
	// NewPipeline guarantees the transport adapter is the final entry.
	policies []Policy
}

// Transporter represents an HTTP pipeline transport used to send HTTP requests and receive responses.
// Exported as policy.Transporter.
type Transporter interface {
	// Do sends the HTTP request and returns the HTTP response or error.
	Do(req *http.Request) (*http.Response, error)
}
+
+// used to adapt a TransportPolicy to a Policy
+type transportPolicy struct {
+ trans Transporter
+}
+
+func (tp transportPolicy) Do(req *Request) (*http.Response, error) {
+ if tp.trans == nil {
+ return nil, errors.New("missing transporter")
+ }
+ resp, err := tp.trans.Do(req.Raw())
+ if err != nil {
+ return nil, err
+ } else if resp == nil {
+ // there was no response and no error (rare but can happen)
+ // this ensures the retry policy will retry the request
+ return nil, errors.New("received nil response")
+ }
+ return resp, nil
+}
+
+// NewPipeline creates a new Pipeline object from the specified Policies.
+// Not directly exported, but used as part of runtime.NewPipeline().
+func NewPipeline(transport Transporter, policies ...Policy) Pipeline {
+ // transport policy must always be the last in the slice
+ policies = append(policies, transportPolicy{trans: transport})
+ return Pipeline{
+ policies: policies,
+ }
+}
+
// Do is called for each and every HTTP request. It passes the request through all
// the Policy objects (which can transform the Request's URL/query parameters/headers)
// and ultimately sends the transformed HTTP request over the network.
func (p Pipeline) Do(req *Request) (*http.Response, error) {
	if req == nil {
		return nil, errors.New("request cannot be nil")
	}
	// hand the chain to the request; req.Next() walks the policies in order,
	// ending at the transport policy appended by NewPipeline
	req.policies = p.policies
	return req.Next()
}
@@ -0,0 +1,260 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package exported
+
+import (
+ "bytes"
+ "context"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "reflect"
+ "strconv"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+)
+
// Base64Encoding is used to specify which base-64 encoder/decoder to use when
// encoding/decoding a slice of bytes to/from a string.
// Exported as runtime.Base64Encoding
type Base64Encoding int

const (
	// Base64StdFormat uses base64.StdEncoding for encoding and decoding payloads.
	Base64StdFormat Base64Encoding = iota

	// Base64URLFormat uses base64.RawURLEncoding for encoding and decoding payloads.
	Base64URLFormat
)

// EncodeByteArray will base-64 encode the byte slice v.
// Exported as runtime.EncodeByteArray()
func EncodeByteArray(v []byte, format Base64Encoding) string {
	switch format {
	case Base64URLFormat:
		return base64.RawURLEncoding.EncodeToString(v)
	default:
		// anything other than URL format falls back to standard encoding
		return base64.StdEncoding.EncodeToString(v)
	}
}
+
// Request is an abstraction over the creation of an HTTP request as it passes through the pipeline.
// Don't use this type directly, use NewRequest() instead.
// Exported as policy.Request.
type Request struct {
	// req is the underlying HTTP request.
	req *http.Request
	// body is the original, rewindable request body (see SetBody/RewindBody).
	body io.ReadSeekCloser
	// policies is the remaining policy chain; Next() consumes it head-first.
	policies []Policy
	// values holds per-operation values keyed by concrete type (see SetOperationValue).
	values opValues
}
+
// opValues maps a value's concrete type to the value itself, giving each
// stored type exactly one slot.
type opValues map[reflect.Type]any

// set stores value keyed by its concrete type, replacing any prior entry.
func (ov opValues) set(value any) {
	ov[reflect.TypeOf(value)] = value
}

// get writes a previously stored entry into value (which must be a non-nil
// pointer) and reports whether a matching entry was found.
func (ov opValues) get(value any) bool {
	target := reflect.ValueOf(value).Elem()
	stored, found := ov[target.Type()]
	if found {
		target.Set(reflect.ValueOf(stored))
	}
	return found
}
+
// NewRequestFromRequest creates a new policy.Request with an existing *http.Request
// Exported as runtime.NewRequestFromRequest().
func NewRequestFromRequest(req *http.Request) (*Request, error) {
	policyReq := &Request{req: req}

	if req.Body != nil {
		// we can avoid a body copy here if the underlying stream is already a
		// ReadSeekCloser.
		readSeekCloser, isReadSeekCloser := req.Body.(io.ReadSeekCloser)

		if !isReadSeekCloser {
			// since this is an already populated http.Request we want to copy
			// over its body, if it has one.
			bodyBytes, err := io.ReadAll(req.Body)

			if err != nil {
				return nil, err
			}

			// the original body is fully consumed now; close it before
			// substituting the in-memory copy
			if err := req.Body.Close(); err != nil {
				return nil, err
			}

			readSeekCloser = NopCloser(bytes.NewReader(bodyBytes))
		}

		// SetBody also takes care of updating the http.Request's body
		// as well, so they should stay in-sync from this point.
		if err := policyReq.SetBody(readSeekCloser, req.Header.Get("Content-Type")); err != nil {
			return nil, err
		}
	}

	return policyReq, nil
}
+
+// NewRequest creates a new Request with the specified input.
+// Exported as runtime.NewRequest().
+func NewRequest(ctx context.Context, httpMethod string, endpoint string) (*Request, error) {
+ req, err := http.NewRequestWithContext(ctx, httpMethod, endpoint, nil)
+ if err != nil {
+ return nil, err
+ }
+ if req.URL.Host == "" {
+ return nil, errors.New("no Host in request URL")
+ }
+ if !(req.URL.Scheme == "http" || req.URL.Scheme == "https") {
+ return nil, fmt.Errorf("unsupported protocol scheme %s", req.URL.Scheme)
+ }
+ return &Request{req: req}, nil
+}
+
// Body returns the original body specified when the Request was created.
// It is nil when no body has been set via SetBody.
func (req *Request) Body() io.ReadSeekCloser {
	return req.body
}

// Raw returns the underlying HTTP request.
// Policies may mutate the returned request in place.
func (req *Request) Raw() *http.Request {
	return req.req
}
+
// Next calls the next policy in the pipeline.
// If there are no more policies, nil and an error are returned.
// This method is intended to be called from pipeline policies.
// To send a request through a pipeline call Pipeline.Do().
func (req *Request) Next() (*http.Response, error) {
	if len(req.policies) == 0 {
		return nil, errors.New("no more policies")
	}
	nextPolicy := req.policies[0]
	// shallow-copy the request so each policy sees only its tail of the
	// chain; the copy shares the underlying *http.Request and body.
	nextReq := *req
	nextReq.policies = nextReq.policies[1:]
	return nextPolicy.Do(&nextReq)
}
+
+// SetOperationValue adds/changes a mutable key/value associated with a single operation.
+func (req *Request) SetOperationValue(value any) {
+ if req.values == nil {
+ req.values = opValues{}
+ }
+ req.values.set(value)
+}
+
+// OperationValue looks for a value set by SetOperationValue().
+func (req *Request) OperationValue(value any) bool {
+ if req.values == nil {
+ return false
+ }
+ return req.values.get(value)
+}
+
// SetBody sets the specified ReadSeekCloser as the HTTP request body, and sets Content-Type and Content-Length
// accordingly. If the ReadSeekCloser is nil or empty, Content-Length won't be set. If contentType is "",
// Content-Type won't be set, and if it was set, will be deleted.
// Use streaming.NopCloser to turn an io.ReadSeeker into an io.ReadSeekCloser.
// It delegates to the package-level SetBody with clobberContentType=true.
func (req *Request) SetBody(body io.ReadSeekCloser, contentType string) error {
	// clobber the existing Content-Type to preserve behavior
	return SetBody(req, body, contentType, true)
}
+
+// RewindBody seeks the request's Body stream back to the beginning so it can be resent when retrying an operation.
+func (req *Request) RewindBody() error {
+ if req.body != nil {
+ // Reset the stream back to the beginning and restore the body
+ _, err := req.body.Seek(0, io.SeekStart)
+ req.req.Body = req.body
+ return err
+ }
+ return nil
+}
+
+// Close closes the request body.
+func (req *Request) Close() error {
+ if req.body == nil {
+ return nil
+ }
+ return req.body.Close()
+}
+
+// Clone returns a deep copy of the request with its context changed to ctx.
+func (req *Request) Clone(ctx context.Context) *Request {
+ r2 := *req
+ r2.req = req.req.Clone(ctx)
+ return &r2
+}
+
+// WithContext returns a shallow copy of the request with its context changed to ctx.
+func (req *Request) WithContext(ctx context.Context) *Request {
+ r2 := new(Request)
+ *r2 = *req
+ r2.req = r2.req.WithContext(ctx)
+ return r2
+}
+
// not exported but dependent on Request

// PolicyFunc is a type that implements the Policy interface.
// Use this type when implementing a stateless policy as a first-class function.
type PolicyFunc func(*Request) (*http.Response, error)

// Do implements the Policy interface on PolicyFunc by invoking the function itself.
func (pf PolicyFunc) Do(req *Request) (*http.Response, error) {
	return pf(req)
}
+
// SetBody sets the specified ReadSeekCloser as the HTTP request body, and sets Content-Type and Content-Length accordingly.
//   - req is the request to modify
//   - body is the request body; if nil or empty, Content-Length won't be set
//   - contentType is the value for the Content-Type header; if empty, Content-Type will be deleted
//   - clobberContentType when true, will overwrite the existing value of Content-Type with contentType
func SetBody(req *Request, body io.ReadSeekCloser, contentType string, clobberContentType bool) error {
	var err error
	var size int64
	if body != nil {
		size, err = body.Seek(0, io.SeekEnd) // Seek to the end to get the stream's size
		if err != nil {
			return err
		}
	}
	if size == 0 {
		// treat an empty stream the same as a nil one: assign req a nil body
		body = nil
		// RFC 9110 specifies a client shouldn't set Content-Length on a request containing no content
		// (Del is a no-op when the header has no value)
		req.req.Header.Del(shared.HeaderContentLength)
	} else {
		// rewind to the start so the full payload is transmitted
		_, err = body.Seek(0, io.SeekStart)
		if err != nil {
			return err
		}
		req.req.Header.Set(shared.HeaderContentLength, strconv.FormatInt(size, 10))
		// GetBody lets the HTTP transport replay the body (e.g. on redirects)
		req.Raw().GetBody = func() (io.ReadCloser, error) {
			_, err := body.Seek(0, io.SeekStart) // Seek back to the beginning of the stream
			return body, err
		}
	}
	// keep a copy of the body argument. this is to handle cases
	// where req.Body is replaced, e.g. httputil.DumpRequest and friends.
	req.body = body
	req.req.Body = body
	req.req.ContentLength = size
	if contentType == "" {
		// Del is a no-op when the header has no value
		req.req.Header.Del(shared.HeaderContentType)
	} else if req.req.Header.Get(shared.HeaderContentType) == "" || clobberContentType {
		req.req.Header.Set(shared.HeaderContentType, contentType)
	}
	return nil
}
@@ -0,0 +1,201 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package exported
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "net/http"
+ "regexp"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/exported"
+)
+
// NewResponseError creates a new *ResponseError from the provided HTTP response.
// Exported as runtime.NewResponseError().
func NewResponseError(resp *http.Response) error {
	// prefer the error code in the response header
	if ec := resp.Header.Get(shared.HeaderXMSErrorCode); ec != "" {
		return NewResponseErrorWithErrorCode(resp, ec)
	}

	// if we didn't get x-ms-error-code, check in the response body
	body, err := exported.Payload(resp, nil)
	if err != nil {
		// since we're not returning the ResponseError in this
		// case we also don't want to write it to the log.
		return err
	}

	// try JSON first, then fall back to XML; errorCode stays empty if
	// neither format yields a code
	var errorCode string
	if len(body) > 0 {
		if fromJSON := extractErrorCodeJSON(body); fromJSON != "" {
			errorCode = fromJSON
		} else if fromXML := extractErrorCodeXML(body); fromXML != "" {
			errorCode = fromXML
		}
	}

	return NewResponseErrorWithErrorCode(resp, errorCode)
}
+
+// NewResponseErrorWithErrorCode creates an *azcore.ResponseError from the provided HTTP response and errorCode.
+// Exported as runtime.NewResponseErrorWithErrorCode().
+func NewResponseErrorWithErrorCode(resp *http.Response, errorCode string) error {
+ respErr := &ResponseError{
+ ErrorCode: errorCode,
+ StatusCode: resp.StatusCode,
+ RawResponse: resp,
+ }
+ log.Write(log.EventResponseError, respErr.Error())
+ return respErr
+}
+
// extractErrorCodeJSON returns the error code from a JSON error payload,
// unwrapping the { "error": {...} } and { "odata.error": {...} } envelopes.
// It returns the empty string if no string-typed "code" field is found.
func extractErrorCodeJSON(body []byte) string {
	var rawObj map[string]any
	if err := json.Unmarshal(body, &rawObj); err != nil {
		// not a JSON object
		return ""
	}

	// some services wrap the error object; unwrap the first envelope found
	for _, key := range []string{"error", "odata.error"} {
		wrapped, found := rawObj[key]
		if !found {
			continue
		}
		inner, valid := wrapped.(map[string]any)
		if !valid {
			// envelope present but not an object; no code to extract
			return ""
		}
		rawObj = inner
		break
	}

	// the error code, when present, is a string under the "code" key
	codeStr, isString := rawObj["code"].(string)
	if !isString {
		return ""
	}
	return codeStr
}
+
// errCodeXML matches an XML error-code element, e.g. <Code>val</Code> or
// <ns:code>val</ns:code>, capturing the code value. Compiled once at package
// scope so the pattern isn't recompiled on every call.
// NOTE: the previous pattern used the class [c|C], which also matched a
// literal '|' character ('|' is not alternation inside a character class);
// [cC] restricts the match to the intended element names.
var errCodeXML = regexp.MustCompile(`<(?:\w+:)?[cC]ode>\s*(\w+)\s*<\/(?:\w+:)?[cC]ode>`)

// extractErrorCodeXML returns the error code from an XML error payload,
// or the empty string if no code element is present.
func extractErrorCodeXML(body []byte) string {
	// a regular expression is much easier than dealing with the XML parser
	res := errCodeXML.FindStringSubmatch(string(body))
	if len(res) != 2 {
		return ""
	}
	// first submatch is the entire match, second one is the captured error code
	return res[1]
}
+
// ResponseError is returned when a request is made to a service and
// the service returns a non-success HTTP status code.
// Use errors.As() to access this type in the error chain.
// Exported as azcore.ResponseError.
type ResponseError struct {
	// ErrorCode is the error code returned by the resource provider if available.
	ErrorCode string

	// StatusCode is the HTTP status code as defined in https://pkg.go.dev/net/http#pkg-constants.
	StatusCode int

	// RawResponse is the underlying HTTP response.
	RawResponse *http.Response `json:"-"`

	// errMsg caches the formatted message built by Error().
	errMsg string
}
+
// Error implements the error interface for type ResponseError.
// Note that the message contents are not contractual and can change over time.
// The formatted message is computed once and then cached in errMsg.
func (e *ResponseError) Error() string {
	// return the cached message if it has already been built
	if e.errMsg != "" {
		return e.errMsg
	}

	const separator = "--------------------------------------------------------------------------------"
	// write the request method and URL with response status code
	msg := &bytes.Buffer{}
	if e.RawResponse != nil {
		if e.RawResponse.Request != nil {
			fmt.Fprintf(msg, "%s %s://%s%s\n", e.RawResponse.Request.Method, e.RawResponse.Request.URL.Scheme, e.RawResponse.Request.URL.Host, e.RawResponse.Request.URL.Path)
		} else {
			fmt.Fprintln(msg, "Request information not available")
		}
		fmt.Fprintln(msg, separator)
		fmt.Fprintf(msg, "RESPONSE %d: %s\n", e.RawResponse.StatusCode, e.RawResponse.Status)
	} else {
		fmt.Fprintln(msg, "Missing RawResponse")
		fmt.Fprintln(msg, separator)
	}
	if e.ErrorCode != "" {
		fmt.Fprintf(msg, "ERROR CODE: %s\n", e.ErrorCode)
	} else {
		fmt.Fprintln(msg, "ERROR CODE UNAVAILABLE")
	}
	if e.RawResponse != nil {
		fmt.Fprintln(msg, separator)
		body, err := exported.Payload(e.RawResponse, nil)
		if err != nil {
			// this really shouldn't fail at this point as the response
			// body is already cached (it was read in NewResponseError)
			fmt.Fprintf(msg, "Error reading response body: %v", err)
		} else if len(body) > 0 {
			// attempt to pretty-print JSON payloads
			if err := json.Indent(msg, body, "", "  "); err != nil {
				// failed to pretty-print so just dump it verbatim
				fmt.Fprint(msg, string(body))
			}
			// the standard library doesn't have a pretty-printer for XML
			fmt.Fprintln(msg)
		} else {
			fmt.Fprintln(msg, "Response contained no body")
		}
	}
	fmt.Fprintln(msg, separator)

	e.errMsg = msg.String()
	return e.errMsg
}
+
// responseError is the internal wire format used for marshaling/unmarshaling
// a ResponseError; RawResponse is intentionally excluded.
type responseError struct {
	ErrorCode    string `json:"errorCode"`
	StatusCode   int    `json:"statusCode"`
	ErrorMessage string `json:"errorMessage"`
}

// MarshalJSON implements json.Marshaler for type ResponseError.
// The formatted message from Error() is persisted so it survives a round trip
// even though RawResponse itself is not serialized.
func (e ResponseError) MarshalJSON() ([]byte, error) {
	return json.Marshal(responseError{
		ErrorCode:    e.ErrorCode,
		StatusCode:   e.StatusCode,
		ErrorMessage: e.Error(),
	})
}

// UnmarshalJSON implements json.Unmarshaler for type ResponseError.
// Note that RawResponse cannot be restored from the serialized form.
func (e *ResponseError) UnmarshalJSON(data []byte) error {
	re := responseError{}
	if err := json.Unmarshal(data, &re); err != nil {
		return err
	}

	e.ErrorCode = re.ErrorCode
	e.StatusCode = re.StatusCode
	// pre-populate the message cache so Error() returns the original text
	e.errMsg = re.ErrorMessage
	return nil
}
@@ -0,0 +1,50 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// This is an internal helper package to combine the complete logging APIs.
+package log
+
+import (
+ azlog "github.com/Azure/azure-sdk-for-go/sdk/azcore/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+)
+
// Event is the log event type, aliased from the shared internal log package.
type Event = log.Event

// Log events defined by azcore, re-exported here so internal packages can
// reference them without importing the public azlog package.
const (
	EventRequest       = azlog.EventRequest
	EventResponse      = azlog.EventResponse
	EventResponseError = azlog.EventResponseError
	EventRetryPolicy   = azlog.EventRetryPolicy
	EventLRO           = azlog.EventLRO
)

// Write invokes the underlying listener with the specified event and message.
// If the event shouldn't be logged or there is no listener then Write does nothing.
func Write(cls log.Event, msg string) {
	log.Write(cls, msg)
}

// Writef invokes the underlying listener with the specified event and formatted message.
// If the event shouldn't be logged or there is no listener then Writef does nothing.
func Writef(cls log.Event, format string, a ...any) {
	log.Writef(cls, format, a...)
}

// SetListener will set the Logger to write to the specified listener.
// It delegates to the shared internal log implementation.
func SetListener(lst func(Event, string)) {
	log.SetListener(lst)
}

// Should returns true if the specified log event should be written to the log.
// By default all log events will be logged. Call SetEvents() to limit
// the log events for logging.
// If no listener has been set this will return false.
// Calling this method is useful when the message to log is computationally expensive
// and you want to avoid the overhead if its log event is not enabled.
func Should(cls log.Event) bool {
	return log.Should(cls)
}
@@ -0,0 +1,159 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package async
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/poller"
+)
+
+// see https://github.com/Azure/azure-resource-manager-rpc/blob/master/v1.0/async-api-reference.md
+
+// Applicable returns true if the LRO is using Azure-AsyncOperation.
+func Applicable(resp *http.Response) bool {
+ return resp.Header.Get(shared.HeaderAzureAsync) != ""
+}
+
+// CanResume returns true if the token can rehydrate this poller type.
+func CanResume(token map[string]any) bool {
+ _, ok := token["asyncURL"]
+ return ok
+}
+
// Poller is an LRO poller that uses the Azure-AsyncOperation pattern.
type Poller[T any] struct {
	// pl is the pipeline used to issue polling requests.
	pl exported.Pipeline

	// resp is the most recent polling response.
	resp *http.Response

	// The URL from Azure-AsyncOperation header.
	AsyncURL string `json:"asyncURL"`

	// The URL from Location header.
	LocURL string `json:"locURL"`

	// The URL from the initial LRO request.
	OrigURL string `json:"origURL"`

	// The HTTP method from the initial LRO request.
	Method string `json:"method"`

	// The value of final-state-via from swagger, can be the empty string.
	FinalState pollers.FinalStateVia `json:"finalState"`

	// The LRO's current state.
	CurState string `json:"state"`
}
+
// New creates a new Poller from the provided initial response and final-state type.
// Pass nil for response to create an empty Poller for rehydration.
func New[T any](pl exported.Pipeline, resp *http.Response, finalState pollers.FinalStateVia) (*Poller[T], error) {
	if resp == nil {
		log.Write(log.EventLRO, "Resuming Azure-AsyncOperation poller.")
		return &Poller[T]{pl: pl}, nil
	}
	log.Write(log.EventLRO, "Using Azure-AsyncOperation poller.")
	asyncURL := resp.Header.Get(shared.HeaderAzureAsync)
	if asyncURL == "" {
		return nil, errors.New("response is missing Azure-AsyncOperation header")
	}
	if !poller.IsValidURL(asyncURL) {
		return nil, fmt.Errorf("invalid polling URL %s", asyncURL)
	}
	// check for provisioning state. if the operation is a RELO
	// and terminates synchronously this will prevent extra polling.
	// it's ok if there's no provisioning state.
	state, _ := poller.GetProvisioningState(resp)
	if state == "" {
		state = poller.StatusInProgress
	}
	p := &Poller[T]{
		pl:         pl,
		resp:       resp,
		AsyncURL:   asyncURL,
		LocURL:     resp.Header.Get(shared.HeaderLocation),
		OrigURL:    resp.Request.URL.String(),
		Method:     resp.Request.Method,
		FinalState: finalState,
		CurState:   state,
	}
	return p, nil
}
+
// Done returns true if the LRO is in a terminal state.
func (p *Poller[T]) Done() bool {
	return poller.IsTerminalState(p.CurState)
}

// Poll retrieves the current state of the LRO by GETting the
// Azure-AsyncOperation URL and updates the cached response and state.
func (p *Poller[T]) Poll(ctx context.Context) (*http.Response, error) {
	err := pollers.PollHelper(ctx, p.AsyncURL, p.pl, func(resp *http.Response) (string, error) {
		if !poller.StatusCodeValid(resp) {
			// cache the failing response for diagnostics before erroring out
			p.resp = resp
			return "", exported.NewResponseError(resp)
		}
		state, err := poller.GetStatus(resp)
		if err != nil {
			return "", err
		} else if state == "" {
			// the Azure-AsyncOperation protocol requires a status value
			return "", errors.New("the response did not contain a status")
		}
		p.resp = resp
		p.CurState = state
		return p.CurState, nil
	})
	if err != nil {
		return nil, err
	}
	return p.resp, nil
}
+
// Result returns the final result of the LRO, issuing a final GET request
// when required by the HTTP method/final-state configuration, then delegates
// to pollers.ResultHelper to populate out.
func (p *Poller[T]) Result(ctx context.Context, out *T) error {
	if p.resp.StatusCode == http.StatusNoContent {
		// 204 means there is no payload to unmarshal
		return nil
	} else if poller.Failed(p.CurState) {
		return exported.NewResponseError(p.resp)
	}
	var req *exported.Request
	var err error
	if p.Method == http.MethodPatch || p.Method == http.MethodPut {
		// for PATCH and PUT, the final GET is on the original resource URL
		req, err = exported.NewRequest(ctx, http.MethodGet, p.OrigURL)
	} else if p.Method == http.MethodPost {
		if p.FinalState == pollers.FinalStateViaAzureAsyncOp {
			// no final GET required
		} else if p.FinalState == pollers.FinalStateViaOriginalURI {
			req, err = exported.NewRequest(ctx, http.MethodGet, p.OrigURL)
		} else if p.LocURL != "" {
			// ideally FinalState would be set to "location" but it isn't always.
			// must check last due to more permissive condition.
			req, err = exported.NewRequest(ctx, http.MethodGet, p.LocURL)
		}
	}
	if err != nil {
		return err
	}

	// if a final GET request has been created, execute it
	if req != nil {
		resp, err := p.pl.Do(req)
		if err != nil {
			return err
		}
		p.resp = resp
	}

	return pollers.ResultHelper(p.resp, poller.Failed(p.CurState), "", out)
}
@@ -0,0 +1,135 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package body
+
+import (
+ "context"
+ "errors"
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/poller"
+)
+
+// Kind is the identifier of this type in a resume token.
+const kind = "body"
+
+// Applicable returns true if the LRO is using no headers, just provisioning state.
+// This is only applicable to PATCH and PUT methods and assumes no polling headers.
+func Applicable(resp *http.Response) bool {
+ // we can't check for absense of headers due to some misbehaving services
+ // like redis that return a Location header but don't actually use that protocol
+ return resp.Request.Method == http.MethodPatch || resp.Request.Method == http.MethodPut
+}
+
+// CanResume returns true if the token can rehydrate this poller type.
+func CanResume(token map[string]any) bool {
+ t, ok := token["type"]
+ if !ok {
+ return false
+ }
+ tt, ok := t.(string)
+ if !ok {
+ return false
+ }
+ return tt == kind
+}
+
// Poller is an LRO poller that uses the Body pattern.
type Poller[T any] struct {
	// pl is the pipeline used to issue polling requests.
	pl exported.Pipeline

	// resp is the most recent polling response.
	resp *http.Response

	// The poller's type, used for resume token processing.
	Type string `json:"type"`

	// The URL for polling.
	PollURL string `json:"pollURL"`

	// The LRO's current state.
	CurState string `json:"state"`
}
+
+// New creates a new Poller from the provided initial response.
+// Pass nil for response to create an empty Poller for rehydration.
+func New[T any](pl exported.Pipeline, resp *http.Response) (*Poller[T], error) {
+ if resp == nil {
+ log.Write(log.EventLRO, "Resuming Body poller.")
+ return &Poller[T]{pl: pl}, nil
+ }
+ log.Write(log.EventLRO, "Using Body poller.")
+ p := &Poller[T]{
+ pl: pl,
+ resp: resp,
+ Type: kind,
+ PollURL: resp.Request.URL.String(),
+ }
+ // default initial state to InProgress. depending on the HTTP
+ // status code and provisioning state, we might change the value.
+ curState := poller.StatusInProgress
+ provState, err := poller.GetProvisioningState(resp)
+ if err != nil && !errors.Is(err, poller.ErrNoBody) {
+ return nil, err
+ }
+ if resp.StatusCode == http.StatusCreated && provState != "" {
+ // absense of provisioning state is ok for a 201, means the operation is in progress
+ curState = provState
+ } else if resp.StatusCode == http.StatusOK {
+ if provState != "" {
+ curState = provState
+ } else if provState == "" {
+ // for a 200, absense of provisioning state indicates success
+ curState = poller.StatusSucceeded
+ }
+ } else if resp.StatusCode == http.StatusNoContent {
+ curState = poller.StatusSucceeded
+ }
+ p.CurState = curState
+ return p, nil
+}
+
// Done returns true if the LRO has reached a terminal state.
func (p *Poller[T]) Done() bool {
	return poller.IsTerminalState(p.CurState)
}
+
+func (p *Poller[T]) Poll(ctx context.Context) (*http.Response, error) {
+ err := pollers.PollHelper(ctx, p.PollURL, p.pl, func(resp *http.Response) (string, error) {
+ if !poller.StatusCodeValid(resp) {
+ p.resp = resp
+ return "", exported.NewResponseError(resp)
+ }
+ if resp.StatusCode == http.StatusNoContent {
+ p.resp = resp
+ p.CurState = poller.StatusSucceeded
+ return p.CurState, nil
+ }
+ state, err := poller.GetProvisioningState(resp)
+ if errors.Is(err, poller.ErrNoBody) {
+ // a missing response body in non-204 case is an error
+ return "", err
+ } else if state == "" {
+ // a response body without provisioning state is considered terminal success
+ state = poller.StatusSucceeded
+ } else if err != nil {
+ return "", err
+ }
+ p.resp = resp
+ p.CurState = state
+ return p.CurState, nil
+ })
+ if err != nil {
+ return nil, err
+ }
+ return p.resp, nil
+}
+
// Result delegates to pollers.ResultHelper to process the final response into out.
func (p *Poller[T]) Result(ctx context.Context, out *T) error {
	return pollers.ResultHelper(p.resp, poller.Failed(p.CurState), "", out)
}
@@ -0,0 +1,133 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package fake
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/poller"
+)
+
// Applicable returns true if the LRO is a fake, i.e. the response carries
// the Core-Fake-Poller status header.
func Applicable(resp *http.Response) bool {
	return resp.Header.Get(shared.HeaderFakePollerStatus) != ""
}
+
// CanResume returns true if the token can rehydrate this poller type.
func CanResume(token map[string]any) bool {
	// presence of the fakeURL key identifies a fake-poller resume token
	_, found := token["fakeURL"]
	return found
}
+
// Poller is an LRO poller that uses the Core-Fake-Poller pattern.
type Poller[T any] struct {
	// pl is the pipeline used to issue polling requests.
	pl exported.Pipeline

	// resp is the most recent polling response.
	resp *http.Response

	// The API name from CtxAPINameKey
	APIName string `json:"apiName"`

	// The URL from Core-Fake-Poller header.
	FakeURL string `json:"fakeURL"`

	// The LRO's current state.
	FakeStatus string `json:"status"`
}

// lroStatusURLSuffix is the URL path suffix for a faked LRO.
const lroStatusURLSuffix = "/get/fake/status"
+
// New creates a new Poller from the provided initial response.
// Pass nil for response to create an empty Poller for rehydration.
func New[T any](pl exported.Pipeline, resp *http.Response) (*Poller[T], error) {
	if resp == nil {
		log.Write(log.EventLRO, "Resuming Core-Fake-Poller poller.")
		return &Poller[T]{pl: pl}, nil
	}

	log.Write(log.EventLRO, "Using Core-Fake-Poller poller.")
	fakeStatus := resp.Header.Get(shared.HeaderFakePollerStatus)
	if fakeStatus == "" {
		return nil, errors.New("response is missing Fake-Poller-Status header")
	}

	// the API name must have been stashed on the request context
	ctxVal := resp.Request.Context().Value(shared.CtxAPINameKey{})
	if ctxVal == nil {
		return nil, errors.New("missing value for CtxAPINameKey")
	}

	apiName, ok := ctxVal.(string)
	if !ok {
		return nil, fmt.Errorf("expected string for CtxAPINameKey, the type was %T", ctxVal)
	}

	// preserve any query parameters from the original request URL
	qp := ""
	if resp.Request.URL.RawQuery != "" {
		qp = "?" + resp.Request.URL.RawQuery
	}

	p := &Poller[T]{
		pl:      pl,
		resp:    resp,
		APIName: apiName,
		// NOTE: any changes to this path format MUST be reflected in SanitizePollerPath()
		FakeURL:    fmt.Sprintf("%s://%s%s%s%s", resp.Request.URL.Scheme, resp.Request.URL.Host, resp.Request.URL.Path, lroStatusURLSuffix, qp),
		FakeStatus: fakeStatus,
	}
	return p, nil
}
+
// Done returns true if the LRO is in a terminal state.
func (p *Poller[T]) Done() bool {
	return poller.IsTerminalState(p.FakeStatus)
}

// Poll retrieves the current state of the LRO by GETting the fake polling
// URL, propagating the API name via the request context.
func (p *Poller[T]) Poll(ctx context.Context) (*http.Response, error) {
	ctx = context.WithValue(ctx, shared.CtxAPINameKey{}, p.APIName)
	err := pollers.PollHelper(ctx, p.FakeURL, p.pl, func(resp *http.Response) (string, error) {
		if !poller.StatusCodeValid(resp) {
			// cache the failing response for diagnostics before erroring out
			p.resp = resp
			return "", exported.NewResponseError(resp)
		}
		fakeStatus := resp.Header.Get(shared.HeaderFakePollerStatus)
		if fakeStatus == "" {
			return "", errors.New("response is missing Fake-Poller-Status header")
		}
		p.resp = resp
		p.FakeStatus = fakeStatus
		return p.FakeStatus, nil
	})
	if err != nil {
		return nil, err
	}
	return p.resp, nil
}
+
// Result returns the final result of the faked LRO, delegating to
// pollers.ResultHelper to populate out.
func (p *Poller[T]) Result(ctx context.Context, out *T) error {
	if p.resp.StatusCode == http.StatusNoContent {
		// 204 means there is no payload to unmarshal
		return nil
	} else if poller.Failed(p.FakeStatus) {
		return exported.NewResponseError(p.resp)
	}

	return pollers.ResultHelper(p.resp, poller.Failed(p.FakeStatus), "", out)
}
+
+// SanitizePollerPath removes any fake-appended suffix from a URL's path.
+func SanitizePollerPath(path string) string {
+ return strings.TrimSuffix(path, lroStatusURLSuffix)
+}
@@ -0,0 +1,123 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package loc
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/poller"
+)
+
+// Kind is the identifier of this type in a resume token.
+const kind = "loc"
+
+// Applicable returns true if the LRO is using Location.
+func Applicable(resp *http.Response) bool {
+ return resp.Header.Get(shared.HeaderLocation) != ""
+}
+
+// CanResume returns true if the token can rehydrate this poller type.
+func CanResume(token map[string]any) bool {
+ t, ok := token["type"]
+ if !ok {
+ return false
+ }
+ tt, ok := t.(string)
+ if !ok {
+ return false
+ }
+ return tt == kind
+}
+
// Poller is an LRO poller that uses the Location pattern.
type Poller[T any] struct {
	// pl is the pipeline used to issue polling requests.
	pl exported.Pipeline
	// resp is the most recent polling response.
	resp *http.Response

	// Type identifies this poller kind in a resume token.
	Type string `json:"type"`
	// PollURL is the URL from the Location header; it can be updated by Poll.
	PollURL string `json:"pollURL"`
	// CurState is the LRO's current state.
	CurState string `json:"state"`
}
+
// New creates a new Poller from the provided initial response.
// Pass nil for response to create an empty Poller for rehydration.
func New[T any](pl exported.Pipeline, resp *http.Response) (*Poller[T], error) {
	if resp == nil {
		log.Write(log.EventLRO, "Resuming Location poller.")
		return &Poller[T]{pl: pl}, nil
	}
	log.Write(log.EventLRO, "Using Location poller.")
	locURL := resp.Header.Get(shared.HeaderLocation)
	if locURL == "" {
		return nil, errors.New("response is missing Location header")
	}
	if !poller.IsValidURL(locURL) {
		return nil, fmt.Errorf("invalid polling URL %s", locURL)
	}
	// check for provisioning state. if the operation is a RELO
	// and terminates synchronously this will prevent extra polling.
	// it's ok if there's no provisioning state.
	state, _ := poller.GetProvisioningState(resp)
	if state == "" {
		state = poller.StatusInProgress
	}
	return &Poller[T]{
		pl:       pl,
		resp:     resp,
		Type:     kind,
		PollURL:  locURL,
		CurState: state,
	}, nil
}
+
// Done returns true if the LRO has reached a terminal state.
func (p *Poller[T]) Done() bool {
	return poller.IsTerminalState(p.CurState)
}

// Poll retrieves the current state of the LRO by GETting the polling URL,
// deriving the state from provisioning state or the HTTP status code.
func (p *Poller[T]) Poll(ctx context.Context) (*http.Response, error) {
	err := pollers.PollHelper(ctx, p.PollURL, p.pl, func(resp *http.Response) (string, error) {
		// location polling can return an updated polling URL
		if h := resp.Header.Get(shared.HeaderLocation); h != "" {
			p.PollURL = h
		}
		// if provisioning state is available, use that. this is only
		// for some ARM LRO scenarios (e.g. DELETE with a Location header)
		// so if it's missing then use HTTP status code.
		provState, _ := poller.GetProvisioningState(resp)
		p.resp = resp
		if provState != "" {
			p.CurState = provState
		} else if resp.StatusCode == http.StatusAccepted {
			p.CurState = poller.StatusInProgress
		} else if resp.StatusCode > 199 && resp.StatusCode < 300 {
			// any 2xx other than a 202 indicates success
			p.CurState = poller.StatusSucceeded
		} else if pollers.IsNonTerminalHTTPStatusCode(resp) {
			// the request timed out or is being throttled.
			// DO NOT include this as a terminal failure. preserve
			// the existing state and return the response.
		} else {
			p.CurState = poller.StatusFailed
		}
		return p.CurState, nil
	})
	if err != nil {
		return nil, err
	}
	return p.resp, nil
}
+
// Result delegates to pollers.ResultHelper to process the final response into out.
func (p *Poller[T]) Result(ctx context.Context, out *T) error {
	return pollers.ResultHelper(p.resp, poller.Failed(p.CurState), "", out)
}
@@ -0,0 +1,148 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package op
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/poller"
+)
+
+// Applicable returns true if the LRO is using Operation-Location.
+func Applicable(resp *http.Response) bool {
+ return resp.Header.Get(shared.HeaderOperationLocation) != ""
+}
+
// CanResume returns true if the token can rehydrate this poller type.
// The presence of the "oplocURL" key identifies an Operation-Location token.
func CanResume(token map[string]any) bool {
	if _, found := token["oplocURL"]; found {
		return true
	}
	return false
}
+
// Poller is an LRO poller that uses the Operation-Location pattern.
// The exported fields are serialized to JSON so the poller can be
// rehydrated from a resume token.
type Poller[T any] struct {
	pl   exported.Pipeline
	resp *http.Response

	// OpLocURL is the URL from the Operation-Location header; it's polled for status.
	OpLocURL string `json:"oplocURL"`
	// LocURL is the optional URL from the Location header.
	LocURL string `json:"locURL"`
	// OrigURL is the URL of the request that started the LRO.
	OrigURL string `json:"origURL"`
	// Method is the HTTP method of the request that started the LRO.
	Method string `json:"method"`
	// FinalState indicates which URL the final payload is fetched from.
	FinalState pollers.FinalStateVia `json:"finalState"`
	// ResultPath is an optional JSON path to the result within the final payload.
	ResultPath string `json:"resultPath"`
	// CurState is the most recently observed operation state.
	CurState string `json:"state"`
}
+
// New creates a new Poller from the provided initial response.
// Pass nil for response to create an empty Poller for rehydration.
// An error is returned when the Operation-Location header is missing
// or when either the Operation-Location or Location URL is invalid.
func New[T any](pl exported.Pipeline, resp *http.Response, finalState pollers.FinalStateVia, resultPath string) (*Poller[T], error) {
	if resp == nil {
		// rehydration case: remaining state is populated from the resume token
		log.Write(log.EventLRO, "Resuming Operation-Location poller.")
		return &Poller[T]{pl: pl}, nil
	}
	log.Write(log.EventLRO, "Using Operation-Location poller.")
	opURL := resp.Header.Get(shared.HeaderOperationLocation)
	if opURL == "" {
		return nil, errors.New("response is missing Operation-Location header")
	}
	if !poller.IsValidURL(opURL) {
		return nil, fmt.Errorf("invalid Operation-Location URL %s", opURL)
	}
	locURL := resp.Header.Get(shared.HeaderLocation)
	// Location header is optional
	if locURL != "" && !poller.IsValidURL(locURL) {
		return nil, fmt.Errorf("invalid Location URL %s", locURL)
	}
	// default initial state to InProgress. if the
	// service sent us a status then use that instead.
	curState := poller.StatusInProgress
	status, err := poller.GetStatus(resp)
	if err != nil && !errors.Is(err, poller.ErrNoBody) {
		// a missing body is fine; any other error is fatal
		return nil, err
	}
	if status != "" {
		curState = status
	}

	return &Poller[T]{
		pl:         pl,
		resp:       resp,
		OpLocURL:   opURL,
		LocURL:     locURL,
		OrigURL:    resp.Request.URL.String(),
		Method:     resp.Request.Method,
		FinalState: finalState,
		ResultPath: resultPath,
		CurState:   curState,
	}, nil
}
+
// Done returns true if the LRO has reached a terminal state.
func (p *Poller[T]) Done() bool {
	return poller.IsTerminalState(p.CurState)
}

// Poll sends a GET request to the Operation-Location URL and updates the
// poller's state from the status in the response body. Unlike the Location
// pattern, a status is required; its absence is an error.
func (p *Poller[T]) Poll(ctx context.Context) (*http.Response, error) {
	err := pollers.PollHelper(ctx, p.OpLocURL, p.pl, func(resp *http.Response) (string, error) {
		if !poller.StatusCodeValid(resp) {
			p.resp = resp
			return "", exported.NewResponseError(resp)
		}
		state, err := poller.GetStatus(resp)
		if err != nil {
			return "", err
		} else if state == "" {
			return "", errors.New("the response did not contain a status")
		}
		p.resp = resp
		p.CurState = state
		return p.CurState, nil
	})
	if err != nil {
		return nil, err
	}
	return p.resp, nil
}

// Result retrieves the final payload, issuing an extra GET request when the
// payload lives at a different URL, then unmarshals it into out.
// The final URL is selected in priority order: the Location URL when
// FinalState says so, the payload's resourceLocation, the original URL for
// PATCH/PUT, or the Location URL for POST. When none apply, the last
// polling response is used as-is.
func (p *Poller[T]) Result(ctx context.Context, out *T) error {
	var req *exported.Request
	var err error

	if p.FinalState == pollers.FinalStateViaLocation && p.LocURL != "" {
		req, err = exported.NewRequest(ctx, http.MethodGet, p.LocURL)
	} else if rl, rlErr := poller.GetResourceLocation(p.resp); rlErr != nil && !errors.Is(rlErr, poller.ErrNoBody) {
		return rlErr
	} else if rl != "" {
		req, err = exported.NewRequest(ctx, http.MethodGet, rl)
	} else if p.Method == http.MethodPatch || p.Method == http.MethodPut {
		req, err = exported.NewRequest(ctx, http.MethodGet, p.OrigURL)
	} else if p.Method == http.MethodPost && p.LocURL != "" {
		req, err = exported.NewRequest(ctx, http.MethodGet, p.LocURL)
	}
	if err != nil {
		return err
	}

	// if a final GET request has been created, execute it
	if req != nil {
		// no JSON path when making a final GET request
		p.ResultPath = ""
		resp, err := p.pl.Do(req)
		if err != nil {
			return err
		}
		p.resp = resp
	}

	return pollers.ResultHelper(p.resp, poller.Failed(p.CurState), p.ResultPath, out)
}
@@ -0,0 +1,24 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package pollers
+
// FinalStateVia is the enumerated type for the possible final-state-via values.
// It tells an LRO poller which URL the final payload is retrieved from.
type FinalStateVia string

const (
	// FinalStateViaAzureAsyncOp indicates the final payload comes from the Azure-AsyncOperation URL.
	FinalStateViaAzureAsyncOp FinalStateVia = "azure-async-operation"

	// FinalStateViaLocation indicates the final payload comes from the Location URL.
	FinalStateViaLocation FinalStateVia = "location"

	// FinalStateViaOriginalURI indicates the final payload comes from the original URL.
	FinalStateViaOriginalURI FinalStateVia = "original-uri"

	// FinalStateViaOpLocation indicates the final payload comes from the Operation-Location URL.
	FinalStateViaOpLocation FinalStateVia = "operation-location"
)
@@ -0,0 +1,212 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package pollers
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "reflect"
+
+ azexported "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/poller"
+)
+
+// getTokenTypeName creates a type name from the type parameter T.
+func getTokenTypeName[T any]() (string, error) {
+ tt := shared.TypeOfT[T]()
+ var n string
+ if tt.Kind() == reflect.Pointer {
+ n = "*"
+ tt = tt.Elem()
+ }
+ n += tt.Name()
+ if n == "" {
+ return "", errors.New("nameless types are not allowed")
+ }
+ return n, nil
+}
+
// resumeTokenWrapper is the JSON envelope for resume tokens. Type records
// the result type's name so a token can be validated before rehydration.
type resumeTokenWrapper[T any] struct {
	Type  string `json:"type"`
	Token T      `json:"token"`
}

// NewResumeToken creates a resume token from the specified type.
// An error is returned if the generic type has no name (e.g. struct{}).
// TResult determines the recorded type name; TSource is the poller
// state being serialized into the token.
func NewResumeToken[TResult, TSource any](from TSource) (string, error) {
	n, err := getTokenTypeName[TResult]()
	if err != nil {
		return "", err
	}
	b, err := json.Marshal(resumeTokenWrapper[TSource]{
		Type:  n,
		Token: from,
	})
	if err != nil {
		return "", err
	}
	return string(b), nil
}
+
// ExtractToken returns the poller-specific token information from the provided token value.
// The returned bytes are the raw JSON of the "token" field written by NewResumeToken.
func ExtractToken(token string) ([]byte, error) {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal([]byte(token), &raw); err != nil {
		return nil, err
	}
	// this is dependent on the type resumeTokenWrapper[T]
	tk, found := raw["token"]
	if !found {
		return nil, errors.New("missing token value")
	}
	return tk, nil
}
+
// IsTokenValid returns an error if the specified token isn't applicable for generic type T.
// It compares the token's recorded "type" value against the name derived from T,
// guarding against resuming a poller with the wrong result type.
func IsTokenValid[T any](token string) error {
	raw := map[string]any{}
	if err := json.Unmarshal([]byte(token), &raw); err != nil {
		return err
	}
	t, ok := raw["type"]
	if !ok {
		return errors.New("missing type value")
	}
	tt, ok := t.(string)
	if !ok {
		return fmt.Errorf("invalid type format %T", t)
	}
	n, err := getTokenTypeName[T]()
	if err != nil {
		return err
	}
	if tt != n {
		return fmt.Errorf("cannot resume from this poller token. token is for type %s, not %s", tt, n)
	}
	return nil
}
+
// NopPoller is used if the operation synchronously completed; Done always
// reports true and no further HTTP requests are made.
type NopPoller[T any] struct {
	resp   *http.Response
	result T
}

// NewNopPoller creates a NopPoller from the provided response.
// It unmarshals the response body into an instance of T.
// A 204 (no content) response or an empty payload leaves result as the zero value.
func NewNopPoller[T any](resp *http.Response) (*NopPoller[T], error) {
	np := &NopPoller[T]{resp: resp}
	if resp.StatusCode == http.StatusNoContent {
		return np, nil
	}
	payload, err := exported.Payload(resp, nil)
	if err != nil {
		return nil, err
	}
	if len(payload) == 0 {
		return np, nil
	}
	if err = json.Unmarshal(payload, &np.result); err != nil {
		return nil, err
	}
	return np, nil
}

// Done always returns true: the operation already completed.
func (*NopPoller[T]) Done() bool {
	return true
}

// Poll returns the original response; there is nothing left to poll.
func (p *NopPoller[T]) Poll(context.Context) (*http.Response, error) {
	return p.resp, nil
}

// Result copies the previously unmarshaled result into out.
func (p *NopPoller[T]) Result(ctx context.Context, out *T) error {
	*out = p.result
	return nil
}
+
// PollHelper creates and executes the request, calling update() with the response.
// If the request fails, the update func is not called.
// The update func returns the state of the operation for logging purposes or an error
// if it fails to extract the required state from the response.
func PollHelper(ctx context.Context, endpoint string, pl azexported.Pipeline, update func(resp *http.Response) (string, error)) error {
	req, err := azexported.NewRequest(ctx, http.MethodGet, endpoint)
	if err != nil {
		return err
	}
	resp, err := pl.Do(req)
	if err != nil {
		return err
	}
	state, err := update(resp)
	if err != nil {
		return err
	}
	// log the state returned by update for diagnostics
	log.Writef(log.EventLRO, "State %s", state)
	return nil
}
+
// ResultHelper processes the response as success or failure.
// In the success case, it unmarshals the payload into either a new instance of T or out.
// In the failure case, it creates an *azcore.Response error from the response.
// When jsonPath is non-empty, only that top-level JSON field of the payload
// is unmarshaled into out.
func ResultHelper[T any](resp *http.Response, failed bool, jsonPath string, out *T) error {
	// short-circuit the simple success case with no response body to unmarshal.
	// NOTE(review): this path returns before the deferred Close below; presumably
	// a 204 body is empty and drained by the transport — confirm.
	if resp.StatusCode == http.StatusNoContent {
		return nil
	}

	defer resp.Body.Close()
	if !poller.StatusCodeValid(resp) || failed {
		// the LRO failed. unmarshall the error and update state
		return azexported.NewResponseError(resp)
	}

	// success case
	payload, err := exported.Payload(resp, nil)
	if err != nil {
		return err
	}

	if jsonPath != "" && len(payload) > 0 {
		// extract the payload from the specified JSON path.
		// do this before the zero-length check in case there
		// is no payload.
		jsonBody := map[string]json.RawMessage{}
		if err = json.Unmarshal(payload, &jsonBody); err != nil {
			return err
		}
		payload = jsonBody[jsonPath]
	}

	// an empty payload leaves out untouched (zero value)
	if len(payload) == 0 {
		return nil
	}

	if err = json.Unmarshal(payload, out); err != nil {
		return err
	}
	return nil
}
+
// IsNonTerminalHTTPStatusCode returns true if the HTTP status code should be
// considered non-terminal thus eligible for retry. These are transient
// failures (timeouts, throttling, gateway errors) that must not mark the
// LRO as failed.
func IsNonTerminalHTTPStatusCode(resp *http.Response) bool {
	return exported.HasStatusCode(resp,
		http.StatusRequestTimeout,      // 408
		http.StatusTooManyRequests,     // 429
		http.StatusInternalServerError, // 500
		http.StatusBadGateway,          // 502
		http.StatusServiceUnavailable,  // 503
		http.StatusGatewayTimeout,      // 504
	)
}
@@ -0,0 +1,44 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package shared
+
// Content-Type values used for request/response payloads.
const (
	ContentTypeAppJSON   = "application/json"
	ContentTypeAppXML    = "application/xml"
	ContentTypeTextPlain = "text/plain"
)

// HTTP header names used throughout azcore policies and pollers.
const (
	HeaderAuthorization          = "Authorization"
	HeaderAuxiliaryAuthorization = "x-ms-authorization-auxiliary"
	HeaderAzureAsync             = "Azure-AsyncOperation"
	HeaderContentLength          = "Content-Length"
	HeaderContentType            = "Content-Type"
	HeaderFakePollerStatus       = "Fake-Poller-Status"
	HeaderLocation               = "Location"
	HeaderOperationLocation      = "Operation-Location"
	HeaderRetryAfter             = "Retry-After"
	HeaderRetryAfterMS           = "Retry-After-Ms"
	HeaderUserAgent              = "User-Agent"
	HeaderWWWAuthenticate        = "WWW-Authenticate"
	HeaderXMSClientRequestID     = "x-ms-client-request-id"
	HeaderXMSRequestID           = "x-ms-request-id"
	HeaderXMSErrorCode           = "x-ms-error-code"
	HeaderXMSRetryAfterMS        = "x-ms-retry-after-ms"
)

// BearerTokenPrefix is prepended to access tokens in the Authorization header.
const BearerTokenPrefix = "Bearer "

// TracingNamespaceAttrName is the span attribute name used to record the Azure namespace.
const TracingNamespaceAttrName = "az.namespace"

const (
	// Module is the name of the calling module used in telemetry data.
	Module = "azcore"

	// Version is the semantic version (see http://semver.org) of this module.
	Version = "v1.17.0"
)
@@ -0,0 +1,149 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package shared
+
+import (
+ "context"
+ "fmt"
+ "net/http"
+ "reflect"
+ "regexp"
+ "strconv"
+ "time"
+)
+
// NOTE: when adding a new context key type, it likely needs to be
// added to the deny-list of key types in ContextWithDeniedValues.
// Empty struct types are used as keys so distinct keys can't collide.

// CtxWithHTTPHeaderKey is used as a context key for adding/retrieving http.Header.
type CtxWithHTTPHeaderKey struct{}

// CtxWithRetryOptionsKey is used as a context key for adding/retrieving RetryOptions.
type CtxWithRetryOptionsKey struct{}

// CtxWithCaptureResponse is used as a context key for retrieving the raw response.
type CtxWithCaptureResponse struct{}

// CtxWithTracingTracer is used as a context key for adding/retrieving tracing.Tracer.
type CtxWithTracingTracer struct{}

// CtxAPINameKey is used as a context key for adding/retrieving the API name.
type CtxAPINameKey struct{}
+
+// Delay waits for the duration to elapse or the context to be cancelled.
+func Delay(ctx context.Context, delay time.Duration) error {
+ select {
+ case <-time.After(delay):
+ return nil
+ case <-ctx.Done():
+ return ctx.Err()
+ }
+}
+
// RetryAfter returns non-zero if the response contains one of the headers with a "retry after" value.
// Headers are checked in the following order: retry-after-ms, x-ms-retry-after-ms, retry-after
// A zero duration is returned when no header yields a usable value.
func RetryAfter(resp *http.Response) time.Duration {
	if resp == nil {
		return 0
	}

	type retryData struct {
		header string
		units  time.Duration

		// custom is used when the regular algorithm failed and is optional.
		// the returned duration is used verbatim (units is not applied).
		custom func(string) time.Duration
	}

	nop := func(string) time.Duration { return 0 }

	// the headers are listed in order of preference
	retries := []retryData{
		{
			header: HeaderRetryAfterMS,
			units:  time.Millisecond,
			custom: nop,
		},
		{
			header: HeaderXMSRetryAfterMS,
			units:  time.Millisecond,
			custom: nop,
		},
		{
			header: HeaderRetryAfter,
			units:  time.Second,

			// retry-after values are expressed in either number of
			// seconds or an HTTP-date indicating when to try again
			custom: func(ra string) time.Duration {
				t, err := time.Parse(time.RFC1123, ra)
				if err != nil {
					return 0
				}
				return time.Until(t)
			},
		},
	}

	for _, retry := range retries {
		v := resp.Header.Get(retry.header)
		if v == "" {
			continue
		}
		// positive integer values are interpreted in the header's units;
		// otherwise fall back to the header-specific custom parser.
		if retryAfter, _ := strconv.Atoi(v); retryAfter > 0 {
			return time.Duration(retryAfter) * retry.units
		} else if d := retry.custom(v); d > 0 {
			return d
		}
	}

	return 0
}
+
+// TypeOfT returns the type of the generic type param.
+func TypeOfT[T any]() reflect.Type {
+ // you can't, at present, obtain the type of
+ // a type parameter, so this is the trick
+ return reflect.TypeOf((*T)(nil)).Elem()
+}
+
// TransportFunc is a helper to use a first-class func to satisfy the Transporter interface.
type TransportFunc func(*http.Request) (*http.Response, error)

// Do implements the Transporter interface for the TransportFunc type
// by invoking the function itself.
func (pf TransportFunc) Do(req *http.Request) (*http.Response, error) {
	return pf(req)
}
+
// modVerRegx matches semver 2.0 strings of the form vMAJOR.MINOR.PATCH
// with an optional pre-release/build suffix. Compiled once at package
// scope to avoid recompiling the pattern on every call.
var modVerRegx = regexp.MustCompile(`^v\d+\.\d+\.\d+(?:-[a-zA-Z0-9_.-]+)?$`)

// ValidateModVer verifies that moduleVersion is a valid semver 2.0 string.
// It returns nil for valid values (e.g. "v1.2.3", "v1.2.3-beta.1") and a
// descriptive error otherwise.
func ValidateModVer(moduleVersion string) error {
	if !modVerRegx.MatchString(moduleVersion) {
		return fmt.Errorf("malformed moduleVersion param value %s", moduleVersion)
	}
	return nil
}
+
// ContextWithDeniedValues wraps an existing [context.Context], denying access to certain context values.
// Pipeline policies that create new requests to be sent down their own pipeline MUST wrap the caller's
// context with an instance of this type. This is to prevent context values from flowing across disjoint
// requests which can have unintended side-effects.
type ContextWithDeniedValues struct {
	context.Context
}

// Value implements part of the [context.Context] interface.
// It acts as a deny-list for certain context keys: the azcore-specific
// key types return nil; all other lookups pass through to the wrapped context.
func (c *ContextWithDeniedValues) Value(key any) any {
	switch key.(type) {
	case CtxAPINameKey, CtxWithCaptureResponse, CtxWithHTTPHeaderKey, CtxWithRetryOptionsKey, CtxWithTracingTracer:
		return nil
	default:
		return c.Context.Value(key)
	}
}
@@ -0,0 +1,10 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright 2017 Microsoft Corporation. All rights reserved.
+// Use of this source code is governed by an MIT
+// license that can be found in the LICENSE file.
+
+// Package log contains functionality for configuring logging behavior.
+// Default logging to stderr can be enabled by setting environment variable AZURE_SDK_GO_LOGGING to "all".
+package log
@@ -0,0 +1,55 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// Package log provides functionality for configuring logging facilities.
+package log
+
+import (
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+)
+
// Event is used to group entries. Each group can be toggled on or off.
type Event = log.Event

const (
	// EventRequest entries contain information about HTTP requests.
	// This includes information like the URL, query parameters, and headers.
	EventRequest Event = "Request"

	// EventResponse entries contain information about HTTP responses.
	// This includes information like the HTTP status code, headers, and request URL.
	EventResponse Event = "Response"

	// EventResponseError entries contain information about HTTP responses that returned
	// an *azcore.ResponseError (i.e. responses with a non 2xx HTTP status code).
	// This includes the contents of ResponseError.Error().
	EventResponseError Event = "ResponseError"

	// EventRetryPolicy entries contain information specific to the retry policy in use.
	EventRetryPolicy Event = "Retry"

	// EventLRO entries contain information specific to long-running operations.
	// This includes information like polling location, operation state, and sleep intervals.
	EventLRO Event = "LongRunningOperation"
)

// SetEvents is used to control which events are written to
// the log. By default all log events are written.
// NOTE: this is not goroutine safe and should be called before using SDK clients.
func SetEvents(cls ...Event) {
	log.SetEvents(cls...)
}

// SetListener will set the Logger to write to the specified Listener.
// NOTE: this is not goroutine safe and should be called before using SDK clients.
func SetListener(lst func(Event, string)) {
	log.SetListener(lst)
}

// resetEvents restores the default event set; for testing purposes only.
func resetEvents() {
	log.TestResetEvents()
}
@@ -0,0 +1,10 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright 2017 Microsoft Corporation. All rights reserved.
+// Use of this source code is governed by an MIT
+// license that can be found in the LICENSE file.
+
+// Package policy contains the definitions needed for configuring in-box pipeline policies
+// and creating custom policies.
+package policy
@@ -0,0 +1,198 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package policy
+
+import (
+ "context"
+ "net/http"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/tracing"
+)
+
// Policy represents an extensibility point for the Pipeline that can mutate the specified
// Request and react to the received Response.
// NOTE(review): these are aliases into the internal "exported" package; presumably
// defined there to avoid import cycles — confirm.
type Policy = exported.Policy

// Transporter represents an HTTP pipeline transport used to send HTTP requests and receive responses.
type Transporter = exported.Transporter

// Request is an abstraction over the creation of an HTTP request as it passes through the pipeline.
// Don't use this type directly, use runtime.NewRequest() instead.
type Request = exported.Request

// ClientOptions contains optional settings for a client's pipeline.
// Instances can be shared across calls to SDK client constructors when uniform configuration is desired.
// Zero-value fields will have their specified default values applied during use.
type ClientOptions struct {
	// APIVersion overrides the default version requested of the service.
	// Set with caution as this package version has not been tested with arbitrary service versions.
	APIVersion string

	// Cloud specifies a cloud for the client. The default is Azure Public Cloud.
	Cloud cloud.Configuration

	// InsecureAllowCredentialWithHTTP enables authenticated requests over HTTP.
	// By default, authenticated requests to an HTTP endpoint are rejected by the client.
	// WARNING: setting this to true will allow sending the credential in clear text. Use with caution.
	InsecureAllowCredentialWithHTTP bool

	// Logging configures the built-in logging policy.
	Logging LogOptions

	// Retry configures the built-in retry policy.
	Retry RetryOptions

	// Telemetry configures the built-in telemetry policy.
	Telemetry TelemetryOptions

	// TracingProvider configures the tracing provider.
	// It defaults to a no-op tracer.
	TracingProvider tracing.Provider

	// Transport sets the transport for HTTP requests.
	Transport Transporter

	// PerCallPolicies contains custom policies to inject into the pipeline.
	// Each policy is executed once per request.
	PerCallPolicies []Policy

	// PerRetryPolicies contains custom policies to inject into the pipeline.
	// Each policy is executed once per request, and for each retry of that request.
	PerRetryPolicies []Policy
}

// LogOptions configures the logging policy's behavior.
type LogOptions struct {
	// IncludeBody indicates if request and response bodies should be included in logging.
	// The default value is false.
	// NOTE: enabling this can lead to disclosure of sensitive information, use with care.
	IncludeBody bool

	// AllowedHeaders is the slice of headers to log with their values intact.
	// All headers not in the slice will have their values REDACTED.
	// Applies to request and response headers.
	AllowedHeaders []string

	// AllowedQueryParams is the slice of query parameters to log with their values intact.
	// All query parameters not in the slice will have their values REDACTED.
	AllowedQueryParams []string
}

// RetryOptions configures the retry policy's behavior.
// Zero-value fields will have their specified default values applied during use.
// This allows for modification of a subset of fields.
type RetryOptions struct {
	// MaxRetries specifies the maximum number of attempts a failed operation will be retried
	// before producing an error.
	// The default value is three. A value less than zero means one try and no retries.
	MaxRetries int32

	// TryTimeout indicates the maximum time allowed for any single try of an HTTP request.
	// This is disabled by default. Specify a value greater than zero to enable.
	// NOTE: Setting this to a small value might cause premature HTTP request time-outs.
	TryTimeout time.Duration

	// RetryDelay specifies the initial amount of delay to use before retrying an operation.
	// The value is used only if the HTTP response does not contain a Retry-After header.
	// The delay increases exponentially with each retry up to the maximum specified by MaxRetryDelay.
	// The default value is four seconds. A value less than zero means no delay between retries.
	RetryDelay time.Duration

	// MaxRetryDelay specifies the maximum delay allowed before retrying an operation.
	// Typically the value is greater than or equal to the value specified in RetryDelay.
	// The default Value is 60 seconds. A value less than zero means there is no cap.
	MaxRetryDelay time.Duration

	// StatusCodes specifies the HTTP status codes that indicate the operation should be retried.
	// A nil slice will use the following values.
	//   http.StatusRequestTimeout      408
	//   http.StatusTooManyRequests     429
	//   http.StatusInternalServerError 500
	//   http.StatusBadGateway          502
	//   http.StatusServiceUnavailable  503
	//   http.StatusGatewayTimeout      504
	// Specifying values will replace the default values.
	// Specifying an empty slice will disable retries for HTTP status codes.
	StatusCodes []int

	// ShouldRetry evaluates if the retry policy should retry the request.
	// When specified, the function overrides comparison against the list of
	// HTTP status codes and error checking within the retry policy. Context
	// and NonRetriable errors remain evaluated before calling ShouldRetry.
	// The *http.Response and error parameters are mutually exclusive, i.e.
	// if one is nil, the other is not nil.
	// A return value of true means the retry policy should retry.
	ShouldRetry func(*http.Response, error) bool
}

// TelemetryOptions configures the telemetry policy's behavior.
type TelemetryOptions struct {
	// ApplicationID is an application-specific identification string to add to the User-Agent.
	// It has a maximum length of 24 characters and must not contain any spaces.
	ApplicationID string

	// Disabled will prevent the addition of any telemetry data to the User-Agent.
	Disabled bool
}

// TokenRequestOptions contain specific parameter that may be used by credentials types when attempting to get a token.
type TokenRequestOptions = exported.TokenRequestOptions

// BearerTokenOptions configures the bearer token policy's behavior.
type BearerTokenOptions struct {
	// AuthorizationHandler allows SDK developers to run client-specific logic when BearerTokenPolicy must authorize a request.
	// When this field isn't set, the policy follows its default behavior of authorizing every request with a bearer token from
	// its given credential.
	AuthorizationHandler AuthorizationHandler

	// InsecureAllowCredentialWithHTTP enables authenticated requests over HTTP.
	// By default, authenticated requests to an HTTP endpoint are rejected by the client.
	// WARNING: setting this to true will allow sending the bearer token in clear text. Use with caution.
	InsecureAllowCredentialWithHTTP bool
}

// AuthorizationHandler allows SDK developers to insert custom logic that runs when BearerTokenPolicy must authorize a request.
type AuthorizationHandler struct {
	// OnRequest provides TokenRequestOptions the policy can use to acquire a token for a request. The policy calls OnRequest
	// whenever it needs a token and may call it multiple times for the same request. Its func parameter authorizes the request
	// with a token from the policy's credential. Implementations that need to perform I/O should use the Request's context,
	// available from Request.Raw().Context(). When OnRequest returns an error, the policy propagates that error and doesn't send
	// the request. When OnRequest is nil, the policy follows its default behavior, which is to authorize the request with a token
	// from its credential according to its configuration.
	OnRequest func(*Request, func(TokenRequestOptions) error) error

	// OnChallenge allows clients to implement custom HTTP authentication challenge handling. BearerTokenPolicy calls it upon
	// receiving a 401 response containing multiple Bearer challenges or a challenge BearerTokenPolicy itself can't handle.
	// OnChallenge is responsible for parsing challenge(s) (the Response's WWW-Authenticate header) and reauthorizing the
	// Request accordingly. Its func argument authorizes the Request with a token from the policy's credential using the given
	// TokenRequestOptions. OnChallenge should honor the Request's context, available from Request.Raw().Context(). When
	// OnChallenge returns nil, the policy will send the Request again.
	OnChallenge func(*Request, *http.Response, func(TokenRequestOptions) error) error
}

// WithCaptureResponse applies the HTTP response retrieval annotation to the parent context.
// The resp parameter will contain the HTTP response after the request has completed.
func WithCaptureResponse(parent context.Context, resp **http.Response) context.Context {
	return context.WithValue(parent, shared.CtxWithCaptureResponse{}, resp)
}

// WithHTTPHeader adds the specified http.Header to the parent context.
// Use this to specify custom HTTP headers at the API-call level.
// Any overlapping headers will have their values replaced with the values specified here.
func WithHTTPHeader(parent context.Context, header http.Header) context.Context {
	return context.WithValue(parent, shared.CtxWithHTTPHeaderKey{}, header)
}

// WithRetryOptions adds the specified RetryOptions to the parent context.
// Use this to specify custom RetryOptions at the API-call level.
func WithRetryOptions(parent context.Context, options RetryOptions) context.Context {
	return context.WithValue(parent, shared.CtxWithRetryOptionsKey{}, options)
}
@@ -0,0 +1,10 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright 2017 Microsoft Corporation. All rights reserved.
+// Use of this source code is governed by an MIT
+// license that can be found in the LICENSE file.
+
+// Package runtime contains various facilities for creating requests and handling responses.
+// The content is intended for SDK authors.
+package runtime
@@ -0,0 +1,27 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+)
+
// NewResponseError creates an *azcore.ResponseError from the provided HTTP response.
// Call this when a service request returns a non-successful status code.
// The error code will be extracted from the *http.Response, either from the x-ms-error-code
// header (preferred) or attempted to be parsed from the response body.
func NewResponseError(resp *http.Response) error {
	return exported.NewResponseError(resp)
}

// NewResponseErrorWithErrorCode creates an *azcore.ResponseError from the provided HTTP response and errorCode.
// Use this variant when the error code is in a non-standard location.
func NewResponseErrorWithErrorCode(resp *http.Response, errorCode string) error {
	return exported.NewResponseErrorWithErrorCode(resp, errorCode)
}
@@ -0,0 +1,138 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "reflect"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/tracing"
+)
+
+// PagingHandler contains the required data for constructing a Pager.
+type PagingHandler[T any] struct {
+ // More returns a boolean indicating if there are more pages to fetch.
+ // It uses the provided page to make the determination.
+ More func(T) bool
+
+ // Fetcher fetches the first and subsequent pages.
+ Fetcher func(context.Context, *T) (T, error)
+
+ // Tracer contains the Tracer from the client that's creating the Pager.
+ Tracer tracing.Tracer
+}
+
+// Pager provides operations for iterating over paged responses.
+// Methods on this type are not safe for concurrent use.
+type Pager[T any] struct {
+ current *T
+ handler PagingHandler[T]
+ tracer tracing.Tracer
+ firstPage bool
+}
+
+// NewPager creates an instance of Pager using the specified PagingHandler.
+// Pass a non-nil T for firstPage if the first page has already been retrieved.
+func NewPager[T any](handler PagingHandler[T]) *Pager[T] {
+ return &Pager[T]{
+ handler: handler,
+ tracer: handler.Tracer,
+ firstPage: true,
+ }
+}
+
+// More returns true if there are more pages to retrieve.
+func (p *Pager[T]) More() bool {
+ if p.current != nil {
+ return p.handler.More(*p.current)
+ }
+ return true
+}
+
+// NextPage advances the pager to the next page.
+func (p *Pager[T]) NextPage(ctx context.Context) (T, error) {
+ if p.current != nil {
+ if p.firstPage {
+ // we get here if it's an LRO-pager, we already have the first page
+ p.firstPage = false
+ return *p.current, nil
+ } else if !p.handler.More(*p.current) {
+ return *new(T), errors.New("no more pages")
+ }
+ } else {
+ // non-LRO case, first page
+ p.firstPage = false
+ }
+
+ var err error
+ ctx, endSpan := StartSpan(ctx, fmt.Sprintf("%s.NextPage", shortenTypeName(reflect.TypeOf(*p).Name())), p.tracer, nil)
+ defer func() { endSpan(err) }()
+
+ resp, err := p.handler.Fetcher(ctx, p.current)
+ if err != nil {
+ return *new(T), err
+ }
+ p.current = &resp
+ return *p.current, nil
+}
+
+// UnmarshalJSON implements the json.Unmarshaler interface for Pager[T].
+func (p *Pager[T]) UnmarshalJSON(data []byte) error {
+ return json.Unmarshal(data, &p.current)
+}
+
+// FetcherForNextLinkOptions contains the optional values for [FetcherForNextLink].
+type FetcherForNextLinkOptions struct {
+ // NextReq is the func to be called when requesting subsequent pages.
+ // Used for paged operations that have a custom next link operation.
+ NextReq func(context.Context, string) (*policy.Request, error)
+
+ // StatusCodes contains additional HTTP status codes indicating success.
+ // The default value is http.StatusOK.
+ StatusCodes []int
+}
+
+// FetcherForNextLink is a helper containing boilerplate code to simplify creating a PagingHandler[T].Fetcher from a next link URL.
+// - ctx is the [context.Context] controlling the lifetime of the HTTP operation
+// - pl is the [Pipeline] used to dispatch the HTTP request
+// - nextLink is the URL used to fetch the next page. the empty string indicates the first page is to be requested
+// - firstReq is the func to be called when creating the request for the first page
+// - options contains any optional parameters, pass nil to accept the default values
+func FetcherForNextLink(ctx context.Context, pl Pipeline, nextLink string, firstReq func(context.Context) (*policy.Request, error), options *FetcherForNextLinkOptions) (*http.Response, error) {
+ var req *policy.Request
+ var err error
+ if options == nil {
+ options = &FetcherForNextLinkOptions{}
+ }
+ if nextLink == "" {
+ req, err = firstReq(ctx)
+ } else if nextLink, err = EncodeQueryParams(nextLink); err == nil {
+ if options.NextReq != nil {
+ req, err = options.NextReq(ctx, nextLink)
+ } else {
+ req, err = NewRequest(ctx, http.MethodGet, nextLink)
+ }
+ }
+ if err != nil {
+ return nil, err
+ }
+ resp, err := pl.Do(req)
+ if err != nil {
+ return nil, err
+ }
+ successCodes := []int{http.StatusOK}
+ successCodes = append(successCodes, options.StatusCodes...)
+ if !HasStatusCode(resp, successCodes...) {
+ return nil, NewResponseError(resp)
+ }
+ return resp, nil
+}
@@ -0,0 +1,94 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+)
+
+// PipelineOptions contains Pipeline options for SDK developers
+type PipelineOptions struct {
+ // AllowedHeaders is the slice of headers to log with their values intact.
+ // All headers not in the slice will have their values REDACTED.
+ // Applies to request and response headers.
+ AllowedHeaders []string
+
+ // AllowedQueryParameters is the slice of query parameters to log with their values intact.
+ // All query parameters not in the slice will have their values REDACTED.
+ AllowedQueryParameters []string
+
+ // APIVersion overrides the default version requested of the service.
+ // Set with caution as this package version has not been tested with arbitrary service versions.
+ APIVersion APIVersionOptions
+
+ // PerCall contains custom policies to inject into the pipeline.
+ // Each policy is executed once per request.
+ PerCall []policy.Policy
+
+ // PerRetry contains custom policies to inject into the pipeline.
+ // Each policy is executed once per request, and for each retry of that request.
+ PerRetry []policy.Policy
+
+ // Tracing contains options used to configure distributed tracing.
+ Tracing TracingOptions
+}
+
+// TracingOptions contains tracing options for SDK developers.
+type TracingOptions struct {
+ // Namespace contains the value to use for the az.namespace span attribute.
+ Namespace string
+}
+
+// Pipeline represents a primitive for sending HTTP requests and receiving responses.
+// Its behavior can be extended by specifying policies during construction.
+type Pipeline = exported.Pipeline
+
+// NewPipeline creates a pipeline from connection options, with any additional policies as specified.
+// Policies from ClientOptions are placed after policies from PipelineOptions.
+// The module and version parameters are used by the telemetry policy, when enabled.
+func NewPipeline(module, version string, plOpts PipelineOptions, options *policy.ClientOptions) Pipeline {
+ cp := policy.ClientOptions{}
+ if options != nil {
+ cp = *options
+ }
+ if len(plOpts.AllowedHeaders) > 0 {
+ headers := make([]string, len(plOpts.AllowedHeaders)+len(cp.Logging.AllowedHeaders))
+ copy(headers, plOpts.AllowedHeaders)
+ headers = append(headers, cp.Logging.AllowedHeaders...)
+ cp.Logging.AllowedHeaders = headers
+ }
+ if len(plOpts.AllowedQueryParameters) > 0 {
+ qp := make([]string, len(plOpts.AllowedQueryParameters)+len(cp.Logging.AllowedQueryParams))
+ copy(qp, plOpts.AllowedQueryParameters)
+ qp = append(qp, cp.Logging.AllowedQueryParams...)
+ cp.Logging.AllowedQueryParams = qp
+ }
+ // we put the includeResponsePolicy at the very beginning so that the raw response
+ // is populated with the final response (some policies might mutate the response)
+ policies := []policy.Policy{exported.PolicyFunc(includeResponsePolicy)}
+ if cp.APIVersion != "" {
+ policies = append(policies, newAPIVersionPolicy(cp.APIVersion, &plOpts.APIVersion))
+ }
+ if !cp.Telemetry.Disabled {
+ policies = append(policies, NewTelemetryPolicy(module, version, &cp.Telemetry))
+ }
+ policies = append(policies, plOpts.PerCall...)
+ policies = append(policies, cp.PerCallPolicies...)
+ policies = append(policies, NewRetryPolicy(&cp.Retry))
+ policies = append(policies, plOpts.PerRetry...)
+ policies = append(policies, cp.PerRetryPolicies...)
+ policies = append(policies, exported.PolicyFunc(httpHeaderPolicy))
+ policies = append(policies, newHTTPTracePolicy(cp.Logging.AllowedQueryParams))
+ policies = append(policies, NewLogPolicy(&cp.Logging))
+ policies = append(policies, exported.PolicyFunc(bodyDownloadPolicy))
+ transport := cp.Transport
+ if transport == nil {
+ transport = defaultHTTPClient
+ }
+ return exported.NewPipeline(transport, policies...)
+}
@@ -0,0 +1,75 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+)
+
+// APIVersionOptions contains options for API versions
+type APIVersionOptions struct {
+ // Location indicates where to set the version on a request, for example in a header or query param
+ Location APIVersionLocation
+ // Name is the name of the header or query parameter, for example "api-version"
+ Name string
+}
+
+// APIVersionLocation indicates which part of a request identifies the service version
+type APIVersionLocation int
+
+const (
+ // APIVersionLocationQueryParam indicates a query parameter
+ APIVersionLocationQueryParam = 0
+ // APIVersionLocationHeader indicates a header
+ APIVersionLocationHeader = 1
+)
+
+// newAPIVersionPolicy constructs an APIVersionPolicy. If version is "", Do will be a no-op. If version
+// isn't empty and opts.Name is empty, Do will return an error.
+func newAPIVersionPolicy(version string, opts *APIVersionOptions) *apiVersionPolicy {
+ if opts == nil {
+ opts = &APIVersionOptions{}
+ }
+ return &apiVersionPolicy{location: opts.Location, name: opts.Name, version: version}
+}
+
+// apiVersionPolicy enables users to set the API version of every request a client sends.
+type apiVersionPolicy struct {
+ // location indicates whether "name" refers to a query parameter or header.
+ location APIVersionLocation
+
+ // name of the query param or header whose value should be overridden; provided by the client.
+ name string
+
+ // version is the value (provided by the user) that replaces the default version value.
+ version string
+}
+
+// Do sets the request's API version, if the policy is configured to do so, replacing any prior value.
+func (a *apiVersionPolicy) Do(req *policy.Request) (*http.Response, error) {
+ if a.version != "" {
+ if a.name == "" {
+ // user set ClientOptions.APIVersion but the client ctor didn't set PipelineOptions.APIVersionOptions
+ return nil, errors.New("this client doesn't support overriding its API version")
+ }
+ switch a.location {
+ case APIVersionLocationHeader:
+ req.Raw().Header.Set(a.name, a.version)
+ case APIVersionLocationQueryParam:
+ q := req.Raw().URL.Query()
+ q.Set(a.name, a.version)
+ req.Raw().URL.RawQuery = q.Encode()
+ default:
+ return nil, fmt.Errorf("unknown APIVersionLocation %d", a.location)
+ }
+ }
+ return req.Next()
+}
@@ -0,0 +1,236 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "encoding/base64"
+ "errors"
+ "net/http"
+ "regexp"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/errorinfo"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/temporal"
+)
+
+// BearerTokenPolicy authorizes requests with bearer tokens acquired from a TokenCredential.
+// It handles [Continuous Access Evaluation] (CAE) challenges. Clients needing to handle
+// additional authentication challenges, or needing more control over authorization, should
+// provide a [policy.AuthorizationHandler] in [policy.BearerTokenOptions].
+//
+// [Continuous Access Evaluation]: https://learn.microsoft.com/entra/identity/conditional-access/concept-continuous-access-evaluation
+type BearerTokenPolicy struct {
+ // mainResource is the resource to be retrieved using the tenant specified in the credential
+ mainResource *temporal.Resource[exported.AccessToken, acquiringResourceState]
+ // the following fields are read-only
+ authzHandler policy.AuthorizationHandler
+ cred exported.TokenCredential
+ scopes []string
+ allowHTTP bool
+}
+
+type acquiringResourceState struct {
+ req *policy.Request
+ p *BearerTokenPolicy
+ tro policy.TokenRequestOptions
+}
+
+// acquire acquires or updates the resource; only one
+// thread/goroutine at a time ever calls this function
+func acquire(state acquiringResourceState) (newResource exported.AccessToken, newExpiration time.Time, err error) {
+ tk, err := state.p.cred.GetToken(&shared.ContextWithDeniedValues{Context: state.req.Raw().Context()}, state.tro)
+ if err != nil {
+ return exported.AccessToken{}, time.Time{}, err
+ }
+ return tk, tk.ExpiresOn, nil
+}
+
+// NewBearerTokenPolicy creates a policy object that authorizes requests with bearer tokens.
+// cred: an azcore.TokenCredential implementation such as a credential object from azidentity
+// scopes: the list of permission scopes required for the token.
+// opts: optional settings. Pass nil to accept default values; this is the same as passing a zero-value options.
+func NewBearerTokenPolicy(cred exported.TokenCredential, scopes []string, opts *policy.BearerTokenOptions) *BearerTokenPolicy {
+ if opts == nil {
+ opts = &policy.BearerTokenOptions{}
+ }
+ ah := opts.AuthorizationHandler
+ if ah.OnRequest == nil {
+ // Set a default OnRequest that simply requests a token with the given scopes. OnChallenge
+ // doesn't get a default so the policy can use a nil check to determine whether the caller
+ // provided an implementation.
+ ah.OnRequest = func(_ *policy.Request, authNZ func(policy.TokenRequestOptions) error) error {
+ // authNZ sets EnableCAE: true in all cases, no need to duplicate that here
+ return authNZ(policy.TokenRequestOptions{Scopes: scopes})
+ }
+ }
+ return &BearerTokenPolicy{
+ authzHandler: ah,
+ cred: cred,
+ scopes: scopes,
+ mainResource: temporal.NewResource(acquire),
+ allowHTTP: opts.InsecureAllowCredentialWithHTTP,
+ }
+}
+
+// authenticateAndAuthorize returns a function which authorizes req with a token from the policy's credential
+func (b *BearerTokenPolicy) authenticateAndAuthorize(req *policy.Request) func(policy.TokenRequestOptions) error {
+ return func(tro policy.TokenRequestOptions) error {
+ tro.EnableCAE = true
+ as := acquiringResourceState{p: b, req: req, tro: tro}
+ tk, err := b.mainResource.Get(as)
+ if err != nil {
+ return err
+ }
+ req.Raw().Header.Set(shared.HeaderAuthorization, shared.BearerTokenPrefix+tk.Token)
+ return nil
+ }
+}
+
+// Do authorizes a request with a bearer token
+func (b *BearerTokenPolicy) Do(req *policy.Request) (*http.Response, error) {
+ // skip adding the authorization header if no TokenCredential was provided.
+ // this prevents a panic that might be hard to diagnose and allows testing
+ // against http endpoints that don't require authentication.
+ if b.cred == nil {
+ return req.Next()
+ }
+
+ if err := checkHTTPSForAuth(req, b.allowHTTP); err != nil {
+ return nil, err
+ }
+
+ err := b.authzHandler.OnRequest(req, b.authenticateAndAuthorize(req))
+ if err != nil {
+ return nil, errorinfo.NonRetriableError(err)
+ }
+
+ res, err := req.Next()
+ if err != nil {
+ return nil, err
+ }
+
+ res, err = b.handleChallenge(req, res, false)
+ return res, err
+}
+
+// handleChallenge handles authentication challenges either directly (for CAE challenges) or by calling
+// the AuthorizationHandler. It's a no-op when the response doesn't include an authentication challenge.
+// It will recurse at most once, to handle a CAE challenge following a non-CAE challenge handled by the
+// AuthorizationHandler.
+func (b *BearerTokenPolicy) handleChallenge(req *policy.Request, res *http.Response, recursed bool) (*http.Response, error) {
+ var err error
+ if res.StatusCode == http.StatusUnauthorized {
+ b.mainResource.Expire()
+ if res.Header.Get(shared.HeaderWWWAuthenticate) != "" {
+ caeChallenge, parseErr := parseCAEChallenge(res)
+ if parseErr != nil {
+ return res, parseErr
+ }
+ switch {
+ case caeChallenge != nil:
+ authNZ := func(tro policy.TokenRequestOptions) error {
+ // Take the TokenRequestOptions provided by OnRequest and add the challenge claims. The value
+ // will be empty at time of writing because CAE is the only feature involving claims. If in
+ // the future some client needs to specify unrelated claims, this function may need to merge
+ // them with the challenge claims.
+ tro.Claims = caeChallenge.params["claims"]
+ return b.authenticateAndAuthorize(req)(tro)
+ }
+ if err = b.authzHandler.OnRequest(req, authNZ); err == nil {
+ if err = req.RewindBody(); err == nil {
+ res, err = req.Next()
+ }
+ }
+ case b.authzHandler.OnChallenge != nil && !recursed:
+ if err = b.authzHandler.OnChallenge(req, res, b.authenticateAndAuthorize(req)); err == nil {
+ if err = req.RewindBody(); err == nil {
+ if res, err = req.Next(); err == nil {
+ res, err = b.handleChallenge(req, res, true)
+ }
+ }
+ } else {
+ // don't retry challenge handling errors
+ err = errorinfo.NonRetriableError(err)
+ }
+ default:
+ // return the response to the pipeline
+ }
+ }
+ }
+ return res, err
+}
+
+func checkHTTPSForAuth(req *policy.Request, allowHTTP bool) error {
+ if strings.ToLower(req.Raw().URL.Scheme) != "https" && !allowHTTP {
+ return errorinfo.NonRetriableError(errors.New("authenticated requests are not permitted for non TLS protected (https) endpoints"))
+ }
+ return nil
+}
+
+// parseCAEChallenge returns a *authChallenge representing Response's CAE challenge (nil when Response has none).
+// If Response includes a CAE challenge having invalid claims, it returns a NonRetriableError.
+func parseCAEChallenge(res *http.Response) (*authChallenge, error) {
+ var (
+ caeChallenge *authChallenge
+ err error
+ )
+ for _, c := range parseChallenges(res) {
+ if c.scheme == "Bearer" {
+ if claims := c.params["claims"]; claims != "" && c.params["error"] == "insufficient_claims" {
+ if b, de := base64.StdEncoding.DecodeString(claims); de == nil {
+ c.params["claims"] = string(b)
+ caeChallenge = &c
+ } else {
+ // don't include the decoding error because it's something
+ // unhelpful like "illegal base64 data at input byte 42"
+ err = errorinfo.NonRetriableError(errors.New("authentication challenge contains invalid claims: " + claims))
+ }
+ break
+ }
+ }
+ }
+ return caeChallenge, err
+}
+
+var (
+ challenge, challengeParams *regexp.Regexp
+ once = &sync.Once{}
+)
+
+type authChallenge struct {
+ scheme string
+ params map[string]string
+}
+
+// parseChallenges assumes authentication challenges have quoted parameter values
+func parseChallenges(res *http.Response) []authChallenge {
+ once.Do(func() {
+ // matches challenges having quoted parameters, capturing scheme and parameters
+ challenge = regexp.MustCompile(`(?:(\w+) ((?:\w+="[^"]*",?\s*)+))`)
+ // captures parameter names and values in a match of the above expression
+ challengeParams = regexp.MustCompile(`(\w+)="([^"]*)"`)
+ })
+ parsed := []authChallenge{}
+ // WWW-Authenticate can have multiple values, each containing multiple challenges
+ for _, h := range res.Header.Values(shared.HeaderWWWAuthenticate) {
+ for _, sm := range challenge.FindAllStringSubmatch(h, -1) {
+ // sm is [challenge, scheme, params] (see regexp documentation on submatches)
+ c := authChallenge{
+ params: make(map[string]string),
+ scheme: sm[1],
+ }
+ for _, sm := range challengeParams.FindAllStringSubmatch(sm[2], -1) {
+ // sm is [key="value", key, value] (see regexp documentation on submatches)
+ c.params[sm[1]] = sm[2]
+ }
+ parsed = append(parsed, c)
+ }
+ }
+ return parsed
+}
@@ -0,0 +1,72 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/errorinfo"
+)
+
+// bodyDownloadPolicy creates a policy object that downloads the response's body to a []byte.
+func bodyDownloadPolicy(req *policy.Request) (*http.Response, error) {
+ resp, err := req.Next()
+ if err != nil {
+ return resp, err
+ }
+ var opValues bodyDownloadPolicyOpValues
+ // don't skip downloading error response bodies
+ if req.OperationValue(&opValues); opValues.Skip && resp.StatusCode < 400 {
+ return resp, err
+ }
+ // Either bodyDownloadPolicyOpValues was not specified (so skip is false)
+ // or it was specified and skip is false: don't skip downloading the body
+ _, err = Payload(resp)
+ if err != nil {
+ return resp, newBodyDownloadError(err, req)
+ }
+ return resp, err
+}
+
+// bodyDownloadPolicyOpValues is the struct containing the per-operation values
+type bodyDownloadPolicyOpValues struct {
+ Skip bool
+}
+
+type bodyDownloadError struct {
+ err error
+}
+
+func newBodyDownloadError(err error, req *policy.Request) error {
+ // on failure, only retry the request for idempotent operations.
+ // we currently identify them as DELETE, GET, and PUT requests.
+ if m := strings.ToUpper(req.Raw().Method); m == http.MethodDelete || m == http.MethodGet || m == http.MethodPut {
+ // error is safe for retry
+ return err
+ }
+ // wrap error to avoid retries
+ return &bodyDownloadError{
+ err: err,
+ }
+}
+
+func (b *bodyDownloadError) Error() string {
+ return fmt.Sprintf("body download policy: %s", b.err.Error())
+}
+
+func (b *bodyDownloadError) NonRetriable() {
+ // marker method
+}
+
+func (b *bodyDownloadError) Unwrap() error {
+ return b.err
+}
+
+var _ errorinfo.NonRetriable = (*bodyDownloadError)(nil)
@@ -0,0 +1,40 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+)
+
+// newHTTPHeaderPolicy creates a policy object that adds custom HTTP headers to a request
+func httpHeaderPolicy(req *policy.Request) (*http.Response, error) {
+ // check if any custom HTTP headers have been specified
+ if header := req.Raw().Context().Value(shared.CtxWithHTTPHeaderKey{}); header != nil {
+ for k, v := range header.(http.Header) {
+ // use Set to replace any existing value
+ // it also canonicalizes the header key
+ req.Raw().Header.Set(k, v[0])
+ // add any remaining values
+ for i := 1; i < len(v); i++ {
+ req.Raw().Header.Add(k, v[i])
+ }
+ }
+ }
+ return req.Next()
+}
+
+// WithHTTPHeader adds the specified http.Header to the parent context.
+// Use this to specify custom HTTP headers at the API-call level.
+// Any overlapping headers will have their values replaced with the values specified here.
+// Deprecated: use [policy.WithHTTPHeader] instead.
+func WithHTTPHeader(parent context.Context, header http.Header) context.Context {
+ return policy.WithHTTPHeader(parent, header)
+}
@@ -0,0 +1,154 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/tracing"
+)
+
+const (
+ attrHTTPMethod = "http.method"
+ attrHTTPURL = "http.url"
+ attrHTTPUserAgent = "http.user_agent"
+ attrHTTPStatusCode = "http.status_code"
+
+ attrAZClientReqID = "az.client_request_id"
+ attrAZServiceReqID = "az.service_request_id"
+
+ attrNetPeerName = "net.peer.name"
+)
+
+// newHTTPTracePolicy creates a new instance of the httpTracePolicy.
+// - allowedQueryParams contains the user-specified query parameters that don't need to be redacted from the trace
+func newHTTPTracePolicy(allowedQueryParams []string) exported.Policy {
+ return &httpTracePolicy{allowedQP: getAllowedQueryParams(allowedQueryParams)}
+}
+
+// httpTracePolicy is a policy that creates a trace for the HTTP request and its response
+type httpTracePolicy struct {
+ allowedQP map[string]struct{}
+}
+
+// Do implements the pipeline.Policy interfaces for the httpTracePolicy type.
+func (h *httpTracePolicy) Do(req *policy.Request) (resp *http.Response, err error) {
+ rawTracer := req.Raw().Context().Value(shared.CtxWithTracingTracer{})
+ if tracer, ok := rawTracer.(tracing.Tracer); ok && tracer.Enabled() {
+ attributes := []tracing.Attribute{
+ {Key: attrHTTPMethod, Value: req.Raw().Method},
+ {Key: attrHTTPURL, Value: getSanitizedURL(*req.Raw().URL, h.allowedQP)},
+ {Key: attrNetPeerName, Value: req.Raw().URL.Host},
+ }
+
+ if ua := req.Raw().Header.Get(shared.HeaderUserAgent); ua != "" {
+ attributes = append(attributes, tracing.Attribute{Key: attrHTTPUserAgent, Value: ua})
+ }
+ if reqID := req.Raw().Header.Get(shared.HeaderXMSClientRequestID); reqID != "" {
+ attributes = append(attributes, tracing.Attribute{Key: attrAZClientReqID, Value: reqID})
+ }
+
+ ctx := req.Raw().Context()
+ ctx, span := tracer.Start(ctx, "HTTP "+req.Raw().Method, &tracing.SpanOptions{
+ Kind: tracing.SpanKindClient,
+ Attributes: attributes,
+ })
+
+ defer func() {
+ if resp != nil {
+ span.SetAttributes(tracing.Attribute{Key: attrHTTPStatusCode, Value: resp.StatusCode})
+ if resp.StatusCode > 399 {
+ span.SetStatus(tracing.SpanStatusError, resp.Status)
+ }
+ if reqID := resp.Header.Get(shared.HeaderXMSRequestID); reqID != "" {
+ span.SetAttributes(tracing.Attribute{Key: attrAZServiceReqID, Value: reqID})
+ }
+ } else if err != nil {
+ var urlErr *url.Error
+ if errors.As(err, &urlErr) {
+ // calling *url.Error.Error() will include the unsanitized URL
+ // which we don't want. in addition, we already have the HTTP verb
+ // and sanitized URL in the trace so we aren't losing any info
+ err = urlErr.Err
+ }
+ span.SetStatus(tracing.SpanStatusError, err.Error())
+ }
+ span.End()
+ }()
+
+ req = req.WithContext(ctx)
+ }
+ resp, err = req.Next()
+ return
+}
+
+// StartSpanOptions contains the optional values for StartSpan.
+type StartSpanOptions struct {
+ // Kind indicates the kind of Span.
+ Kind tracing.SpanKind
+ // Attributes contains key-value pairs of attributes for the span.
+ Attributes []tracing.Attribute
+}
+
+// StartSpan starts a new tracing span.
+// You must call the returned func to terminate the span. Pass the applicable error
+// if the span will exit with an error condition.
+// - ctx is the parent context of the newly created context
+// - name is the name of the span. this is typically the fully qualified name of an API ("Client.Method")
+// - tracer is the client's Tracer for creating spans
+// - options contains optional values. pass nil to accept any default values
+func StartSpan(ctx context.Context, name string, tracer tracing.Tracer, options *StartSpanOptions) (context.Context, func(error)) {
+ if !tracer.Enabled() {
+ return ctx, func(err error) {}
+ }
+
+ // we MUST propagate the active tracer before returning so that the trace policy can access it
+ ctx = context.WithValue(ctx, shared.CtxWithTracingTracer{}, tracer)
+
+ if activeSpan := ctx.Value(ctxActiveSpan{}); activeSpan != nil {
+ // per the design guidelines, if a SDK method Foo() calls SDK method Bar(),
+ // then the span for Bar() must be suppressed. however, if Bar() makes a REST
+ // call, then Bar's HTTP span must be a child of Foo's span.
+ // however, there is an exception to this rule. if the SDK method Foo() is a
+ // messaging producer/consumer, and it takes a callback that's a SDK method
+ // Bar(), then the span for Bar() must _not_ be suppressed.
+ if kind := activeSpan.(tracing.SpanKind); kind == tracing.SpanKindClient || kind == tracing.SpanKindInternal {
+ return ctx, func(err error) {}
+ }
+ }
+
+ if options == nil {
+ options = &StartSpanOptions{}
+ }
+ if options.Kind == 0 {
+ options.Kind = tracing.SpanKindInternal
+ }
+
+ ctx, span := tracer.Start(ctx, name, &tracing.SpanOptions{
+ Kind: options.Kind,
+ Attributes: options.Attributes,
+ })
+ ctx = context.WithValue(ctx, ctxActiveSpan{}, options.Kind)
+ return ctx, func(err error) {
+ if err != nil {
+ errType := strings.Replace(fmt.Sprintf("%T", err), "*exported.", "*azcore.", 1)
+ span.SetStatus(tracing.SpanStatusError, fmt.Sprintf("%s:\n%s", errType, err.Error()))
+ }
+ span.End()
+ }
+}
+
+// ctxActiveSpan is used as a context key for indicating a SDK client span is in progress.
+type ctxActiveSpan struct{}
@@ -0,0 +1,35 @@
+//go:build go1.16
+// +build go1.16
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+)
+
+// includeResponsePolicy creates a policy that retrieves the raw HTTP response upon request
+func includeResponsePolicy(req *policy.Request) (*http.Response, error) {
+ resp, err := req.Next()
+ if resp == nil {
+ return resp, err
+ }
+ if httpOutRaw := req.Raw().Context().Value(shared.CtxWithCaptureResponse{}); httpOutRaw != nil {
+ httpOut := httpOutRaw.(**http.Response)
+ *httpOut = resp
+ }
+ return resp, err
+}
+
+// WithCaptureResponse applies the HTTP response retrieval annotation to the parent context.
+// The resp parameter will contain the HTTP response after the request has completed.
+// Deprecated: use [policy.WithCaptureResponse] instead.
+func WithCaptureResponse(parent context.Context, resp **http.Response) context.Context {
+ return policy.WithCaptureResponse(parent, resp)
+}
@@ -0,0 +1,64 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+)
+
+// KeyCredentialPolicy authorizes requests with a [azcore.KeyCredential].
+type KeyCredentialPolicy struct {
+ cred *exported.KeyCredential
+ header string
+ prefix string
+ allowHTTP bool
+}
+
+// KeyCredentialPolicyOptions contains the optional values configuring [KeyCredentialPolicy].
+type KeyCredentialPolicyOptions struct {
+ // InsecureAllowCredentialWithHTTP enables authenticated requests over HTTP.
+ // By default, authenticated requests to an HTTP endpoint are rejected by the client.
+ // WARNING: setting this to true will allow sending the authentication key in clear text. Use with caution.
+ InsecureAllowCredentialWithHTTP bool
+
+ // Prefix is used if the key requires a prefix before it's inserted into the HTTP request.
+ Prefix string
+}
+
+// NewKeyCredentialPolicy creates a new instance of [KeyCredentialPolicy].
+// - cred is the [azcore.KeyCredential] used to authenticate with the service
+// - header is the name of the HTTP request header in which the key is placed
+// - options contains optional configuration, pass nil to accept the default values
+func NewKeyCredentialPolicy(cred *exported.KeyCredential, header string, options *KeyCredentialPolicyOptions) *KeyCredentialPolicy {
+ if options == nil {
+ options = &KeyCredentialPolicyOptions{}
+ }
+ return &KeyCredentialPolicy{
+ cred: cred,
+ header: header,
+ prefix: options.Prefix,
+ allowHTTP: options.InsecureAllowCredentialWithHTTP,
+ }
+}
+
+// Do implements the Do method on the [policy.Policy] interface.
+func (k *KeyCredentialPolicy) Do(req *policy.Request) (*http.Response, error) {
+ // skip adding the authorization header if no KeyCredential was provided.
+ // this prevents a panic that might be hard to diagnose and allows testing
+ // against http endpoints that don't require authentication.
+ if k.cred != nil {
+ if err := checkHTTPSForAuth(req, k.allowHTTP); err != nil {
+ return nil, err
+ }
+ val := exported.KeyCredentialGet(k.cred)
+ if k.prefix != "" {
+ val = k.prefix + val
+ }
+ req.Raw().Header.Add(k.header, val)
+ }
+ return req.Next()
+}
@@ -0,0 +1,264 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/diag"
+)
+
+// logPolicy is the pipeline policy that logs HTTP requests and responses,
+// redacting headers and query parameters that aren't in the allow-lists.
+type logPolicy struct {
+	includeBody    bool                // when true, bodies of loggable content types are included
+	allowedHeaders map[string]struct{} // lower-cased header names logged un-redacted
+	allowedQP      map[string]struct{} // lower-cased query parameter names logged un-redacted
+}
+
+// NewLogPolicy creates a request/response logging policy object configured using the specified options.
+// Pass nil to accept the default values; this is the same as passing a zero-value options.
+func NewLogPolicy(o *policy.LogOptions) policy.Policy {
+	if o == nil {
+		o = &policy.LogOptions{}
+	}
+	// the default allow-list of headers that are logged un-redacted
+	defaultAllowed := []string{
+		"accept",
+		"cache-control",
+		"connection",
+		"content-length",
+		"content-type",
+		"date",
+		"etag",
+		"expires",
+		"if-match",
+		"if-modified-since",
+		"if-none-match",
+		"if-unmodified-since",
+		"last-modified",
+		"ms-cv",
+		"pragma",
+		"request-id",
+		"retry-after",
+		"server",
+		"traceparent",
+		"transfer-encoding",
+		"user-agent",
+		"www-authenticate",
+		"x-ms-request-id",
+		"x-ms-client-request-id",
+		"x-ms-return-client-request-id",
+	}
+	allowedHeaders := make(map[string]struct{}, len(defaultAllowed)+len(o.AllowedHeaders))
+	for _, h := range defaultAllowed {
+		allowedHeaders[h] = struct{}{}
+	}
+	// add any caller-specified allowed headers to the set
+	for _, ah := range o.AllowedHeaders {
+		allowedHeaders[strings.ToLower(ah)] = struct{}{}
+	}
+	// now do the same thing for query params
+	allowedQP := getAllowedQueryParams(o.AllowedQueryParams)
+	return &logPolicy{
+		includeBody:    o.IncludeBody,
+		allowedHeaders: allowedHeaders,
+		allowedQP:      allowedQP,
+	}
+}
+
+// getAllowedQueryParams merges the default set of allowed query parameters
+// with a custom set (usually comes from client options).
+func getAllowedQueryParams(customAllowedQP []string) map[string]struct{} {
+	merged := make(map[string]struct{}, 1+len(customAllowedQP))
+	// "api-version" is always allowed
+	merged["api-version"] = struct{}{}
+	for _, param := range customAllowedQP {
+		merged[strings.ToLower(param)] = struct{}{}
+	}
+	return merged
+}
+
+// logPolicyOpValues is the struct containing the per-operation values
+type logPolicyOpValues struct {
+	try   int32     // one-based try counter, persisted across retries via the request's operation values
+	start time.Time // time the first try of the operation began
+}
+
+// Do logs the outgoing request (when request logging is enabled), forwards it
+// down the pipeline, then logs the response or error (when response logging is
+// enabled), including per-try and total operation timings.
+func (p *logPolicy) Do(req *policy.Request) (*http.Response, error) {
+	// Get the per-operation values. These are saved in the Message's map so that they persist across each retry calling into this policy object.
+	var opValues logPolicyOpValues
+	if req.OperationValue(&opValues); opValues.start.IsZero() {
+		opValues.start = time.Now() // If this is the 1st try, record this operation's start time
+	}
+	opValues.try++ // The first try is #1 (not #0)
+	req.SetOperationValue(opValues)
+
+	// Log the outgoing request as informational
+	if log.Should(log.EventRequest) {
+		b := &bytes.Buffer{}
+		fmt.Fprintf(b, "==> OUTGOING REQUEST (Try=%d)\n", opValues.try)
+		p.writeRequestWithResponse(b, req, nil, nil)
+		var err error
+		if p.includeBody {
+			err = writeReqBody(req, b)
+		}
+		// write the accumulated entry even if reading the body failed above
+		log.Write(log.EventRequest, b.String())
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// Set the time for this particular retry operation and then Do the operation.
+	tryStart := time.Now()
+	response, err := req.Next() // Make the request
+	tryEnd := time.Now()
+	tryDuration := tryEnd.Sub(tryStart)
+	opDuration := tryEnd.Sub(opValues.start)
+
+	if log.Should(log.EventResponse) {
+		// We're going to log this; build the string to log
+		b := &bytes.Buffer{}
+		fmt.Fprintf(b, "==> REQUEST/RESPONSE (Try=%d/%v, OpTime=%v) -- ", opValues.try, tryDuration, opDuration)
+		if err != nil { // This HTTP request did not get a response from the service
+			fmt.Fprint(b, "REQUEST ERROR\n")
+		} else {
+			fmt.Fprint(b, "RESPONSE RECEIVED\n")
+		}
+
+		p.writeRequestWithResponse(b, req, response, err)
+		if err != nil {
+			// skip frames runtime.Callers() and runtime.StackTrace()
+			b.WriteString(diag.StackTrace(2, 32))
+		} else if p.includeBody {
+			err = writeRespBody(response, b)
+		}
+		log.Write(log.EventResponse, b.String())
+	}
+	return response, err
+}
+
+const redactedValue = "REDACTED"
+
+// getSanitizedURL returns a sanitized string for the provided url.URL
+func getSanitizedURL(u url.URL, allowedQueryParams map[string]struct{}) string {
+	// replace the value of every query parameter that isn't in the allow-list
+	values := u.Query()
+	for name := range values {
+		if _, allowed := allowedQueryParams[strings.ToLower(name)]; !allowed {
+			values.Set(name, redactedValue)
+		}
+	}
+	u.RawQuery = values.Encode()
+	return u.String()
+}
+
+// writeRequestWithResponse appends a formatted HTTP request into a Buffer. If resp and/or err are
+// not nil, then these are also written into the Buffer.
+func (p *logPolicy) writeRequestWithResponse(b *bytes.Buffer, req *policy.Request, resp *http.Response, err error) {
+	// Write the request into the buffer. The URL is sanitized per the query-param allow-list.
+	fmt.Fprint(b, "   "+req.Raw().Method+" "+getSanitizedURL(*req.Raw().URL, p.allowedQP)+"\n")
+	p.writeHeader(b, req.Raw().Header)
+	if resp != nil {
+		fmt.Fprintln(b, "   --------------------------------------------------------------------------------")
+		fmt.Fprint(b, "   RESPONSE Status: "+resp.Status+"\n")
+		p.writeHeader(b, resp.Header)
+	}
+	if err != nil {
+		fmt.Fprintln(b, "   --------------------------------------------------------------------------------")
+		fmt.Fprint(b, "   ERROR:\n"+err.Error()+"\n")
+	}
+}
+
+// writeHeader appends an HTTP request's or response's header into a Buffer.
+// Headers are written in alphabetical order; values not in the allow-list are redacted.
+func (p *logPolicy) writeHeader(b *bytes.Buffer, header http.Header) {
+	if len(header) == 0 {
+		b.WriteString("   (no headers)\n")
+		return
+	}
+	keys := make([]string, 0, len(header))
+	// Alphabetize the headers
+	for k := range header {
+		keys = append(keys, k)
+	}
+	sort.Strings(keys)
+	for _, k := range keys {
+		// don't use Get() as it will canonicalize k which might cause a mismatch
+		// NOTE: only the first value of a multi-valued header is logged
+		value := header[k][0]
+		// redact all header values not in the allow-list
+		if _, ok := p.allowedHeaders[strings.ToLower(k)]; !ok {
+			value = redactedValue
+		}
+		fmt.Fprintf(b, "   %s: %+v\n", k, value)
+	}
+}
+
+// returns true if the request/response body should be logged.
+// this is determined by looking at the content-type header value.
+func shouldLogBody(b *bytes.Buffer, contentType string) bool {
+ contentType = strings.ToLower(contentType)
+ if strings.HasPrefix(contentType, "text") ||
+ strings.Contains(contentType, "json") ||
+ strings.Contains(contentType, "xml") {
+ return true
+ }
+ fmt.Fprintf(b, " Skip logging body for %s\n", contentType)
+ return false
+}
+
+// writes to a buffer, used for logging purposes
+func writeReqBody(req *policy.Request, b *bytes.Buffer) error {
+ if req.Raw().Body == nil {
+ fmt.Fprint(b, " Request contained no body\n")
+ return nil
+ }
+ if ct := req.Raw().Header.Get(shared.HeaderContentType); !shouldLogBody(b, ct) {
+ return nil
+ }
+ body, err := io.ReadAll(req.Raw().Body)
+ if err != nil {
+ fmt.Fprintf(b, " Failed to read request body: %s\n", err.Error())
+ return err
+ }
+ if err := req.RewindBody(); err != nil {
+ return err
+ }
+ logBody(b, body)
+ return nil
+}
+
+// writes to a buffer, used for logging purposes
+func writeRespBody(resp *http.Response, b *bytes.Buffer) error {
+ ct := resp.Header.Get(shared.HeaderContentType)
+ if ct == "" {
+ fmt.Fprint(b, " Response contained no body\n")
+ return nil
+ } else if !shouldLogBody(b, ct) {
+ return nil
+ }
+ body, err := Payload(resp)
+ if err != nil {
+ fmt.Fprintf(b, " Failed to read response body: %s\n", err.Error())
+ return err
+ }
+ if len(body) > 0 {
+ logBody(b, body)
+ } else {
+ fmt.Fprint(b, " Response contained no body\n")
+ }
+ return nil
+}
+
+func logBody(b *bytes.Buffer, body []byte) {
+ fmt.Fprintln(b, " --------------------------------------------------------------------------------")
+ fmt.Fprintln(b, string(body))
+ fmt.Fprintln(b, " --------------------------------------------------------------------------------")
+}
@@ -0,0 +1,34 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/uuid"
+)
+
+// requestIDPolicy is a stateless policy that ensures each request carries a client request ID.
+type requestIDPolicy struct{}
+
+// NewRequestIDPolicy returns a policy that adds the x-ms-client-request-id header
+func NewRequestIDPolicy() policy.Policy {
+	return &requestIDPolicy{}
+}
+
+// Do sets a freshly generated UUID as the x-ms-client-request-id header,
+// preserving any value the caller already set. A UUID generation failure
+// aborts the request.
+func (r *requestIDPolicy) Do(req *policy.Request) (*http.Response, error) {
+	if req.Raw().Header.Get(shared.HeaderXMSClientRequestID) == "" {
+		id, err := uuid.New()
+		if err != nil {
+			return nil, err
+		}
+		req.Raw().Header.Set(shared.HeaderXMSClientRequestID, id.String())
+	}
+
+	return req.Next()
+}
@@ -0,0 +1,276 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "errors"
+ "io"
+ "math"
+ "math/rand"
+ "net/http"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/errorinfo"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/exported"
+)
+
+const (
+	// defaultMaxRetries is the default number of retries performed after the initial try.
+	defaultMaxRetries = 3
+)
+
+// setDefaults backfills zero-valued fields in o with the SDK defaults and
+// normalizes negative values: negative MaxRetries disables retries, negative
+// MaxRetryDelay becomes effectively unlimited, negative RetryDelay becomes zero.
+func setDefaults(o *policy.RetryOptions) {
+	switch {
+	case o.MaxRetries == 0:
+		o.MaxRetries = defaultMaxRetries
+	case o.MaxRetries < 0:
+		o.MaxRetries = 0
+	}
+
+	// SDK guidelines specify the default MaxRetryDelay is 60 seconds
+	switch {
+	case o.MaxRetryDelay == 0:
+		o.MaxRetryDelay = 60 * time.Second
+	case o.MaxRetryDelay < 0:
+		// not really an unlimited cap, but sufficiently large enough to be considered as such
+		o.MaxRetryDelay = math.MaxInt64
+	}
+
+	switch {
+	case o.RetryDelay == 0:
+		o.RetryDelay = 800 * time.Millisecond
+	case o.RetryDelay < 0:
+		o.RetryDelay = 0
+	}
+
+	if o.StatusCodes == nil {
+		// NOTE: if you change this list, you MUST update the docs in policy/policy.go
+		o.StatusCodes = []int{
+			http.StatusRequestTimeout,      // 408
+			http.StatusTooManyRequests,     // 429
+			http.StatusInternalServerError, // 500
+			http.StatusBadGateway,          // 502
+			http.StatusServiceUnavailable,  // 503
+			http.StatusGatewayTimeout,      // 504
+		}
+	}
+}
+
+// calcDelay computes the exponential backoff delay ((2^try)-1) * RetryDelay for
+// the given try, applies jitter in [0.8, 1.3), and caps the result at
+// MaxRetryDelay. Overflow at each step saturates to MaxInt64.
+func calcDelay(o policy.RetryOptions, try int32) time.Duration { // try is >=1; never 0
+	// avoid overflow when shifting left
+	factor := time.Duration(math.MaxInt64)
+	if try < 63 {
+		factor = time.Duration(int64(1<<try) - 1)
+	}
+
+	delay := factor * o.RetryDelay
+	if delay < factor {
+		// overflow has happened so set to max value
+		delay = time.Duration(math.MaxInt64)
+	}
+
+	// Introduce jitter:  [0.0, 1.0) / 2 = [0.0, 0.5) + 0.8 = [0.8, 1.3)
+	jitterMultiplier := rand.Float64()/2 + 0.8 // NOTE: We want math/rand; not crypto/rand
+
+	// do the jitter math in float64 so the overflow check below is meaningful
+	delayFloat := float64(delay) * jitterMultiplier
+	if delayFloat > float64(math.MaxInt64) {
+		// the jitter pushed us over MaxInt64, so just use MaxInt64
+		delay = time.Duration(math.MaxInt64)
+	} else {
+		delay = time.Duration(delayFloat)
+	}
+
+	if delay > o.MaxRetryDelay { // MaxRetryDelay is backfilled with non-negative value
+		delay = o.MaxRetryDelay
+	}
+
+	return delay
+}
+
+// NewRetryPolicy creates a policy object configured using the specified options.
+// Pass nil to accept the default values; this is the same as passing a zero-value options.
+func NewRetryPolicy(o *policy.RetryOptions) policy.Policy {
+	opts := policy.RetryOptions{}
+	if o != nil {
+		opts = *o
+	}
+	return &retryPolicy{options: opts}
+}
+
+// retryPolicy implements retry with exponential backoff; see Do for details.
+type retryPolicy struct {
+	options policy.RetryOptions // copied at construction; may be overridden per call via context
+}
+
+// Do sends the request, retrying per the configured (or per-call overridden)
+// RetryOptions until success, a non-retriable error, the max try count, or
+// cancellation of the caller's context.
+func (p *retryPolicy) Do(req *policy.Request) (resp *http.Response, err error) {
+	options := p.options
+	// check if the retry options have been overridden for this call
+	if override := req.Raw().Context().Value(shared.CtxWithRetryOptionsKey{}); override != nil {
+		options = override.(policy.RetryOptions)
+	}
+	setDefaults(&options)
+	// Exponential retry algorithm: ((2 ^ attempt) - 1) * delay * random(0.8, 1.3)
+	// When to retry: connection failure or temporary/timeout.
+	var rwbody *retryableRequestBody
+	if req.Body() != nil {
+		// wrap the body so we control when it's actually closed.
+		// do this outside the for loop so defers don't accumulate.
+		rwbody = &retryableRequestBody{body: req.Body()}
+		defer rwbody.realClose()
+	}
+	try := int32(1)
+	for {
+		resp = nil // reset
+		// unfortunately we don't have access to the custom allow-list of query params, so we'll redact everything but the default allowed QPs
+		log.Writef(log.EventRetryPolicy, "=====> Try=%d for %s %s", try, req.Raw().Method, getSanitizedURL(*req.Raw().URL, getAllowedQueryParams(nil)))
+
+		// For each try, seek to the beginning of the Body stream. We do this even for the 1st try because
+		// the stream may not be at offset 0 when we first get it and we want the same behavior for the
+		// 1st try as for additional tries.
+		err = req.RewindBody()
+		if err != nil {
+			return
+		}
+		// RewindBody() restores Raw().Body to its original state, so set our rewindable after
+		if rwbody != nil {
+			req.Raw().Body = rwbody
+		}
+
+		if options.TryTimeout == 0 {
+			clone := req.Clone(req.Raw().Context())
+			resp, err = clone.Next()
+		} else {
+			// Set the per-try time for this particular retry operation and then Do the operation.
+			tryCtx, tryCancel := context.WithTimeout(req.Raw().Context(), options.TryTimeout)
+			clone := req.Clone(tryCtx)
+			resp, err = clone.Next() // Make the request
+			// if the body was already downloaded or there was an error it's safe to cancel the context now
+			if err != nil {
+				tryCancel()
+			} else if exported.PayloadDownloaded(resp) {
+				tryCancel()
+			} else {
+				// must cancel the context after the body has been read and closed
+				resp.Body = &contextCancelReadCloser{cf: tryCancel, body: resp.Body}
+			}
+		}
+		if err == nil {
+			log.Writef(log.EventRetryPolicy, "response %d", resp.StatusCode)
+		} else {
+			log.Writef(log.EventRetryPolicy, "error %v", err)
+		}
+
+		if ctxErr := req.Raw().Context().Err(); ctxErr != nil {
+			// don't retry if the parent context has been cancelled or its deadline exceeded
+			err = ctxErr
+			log.Writef(log.EventRetryPolicy, "abort due to %v", err)
+			return
+		}
+
+		// check if the error is not retriable
+		var nre errorinfo.NonRetriable
+		if errors.As(err, &nre) {
+			// the error says it's not retriable so don't retry
+			log.Writef(log.EventRetryPolicy, "non-retriable error %T", nre)
+			return
+		}
+
+		if options.ShouldRetry != nil {
+			// a non-nil ShouldRetry overrides our HTTP status code check
+			if !options.ShouldRetry(resp, err) {
+				// predicate says we shouldn't retry
+				log.Write(log.EventRetryPolicy, "exit due to ShouldRetry")
+				return
+			}
+		} else if err == nil && !HasStatusCode(resp, options.StatusCodes...) {
+			// if there is no error and the response code isn't in the list of retry codes then we're done.
+			log.Write(log.EventRetryPolicy, "exit due to non-retriable status code")
+			return
+		}
+
+		// MaxRetries retries follow the initial try, so try counts up to MaxRetries+1
+		if try == options.MaxRetries+1 {
+			// max number of tries has been reached, don't sleep again
+			log.Writef(log.EventRetryPolicy, "MaxRetries %d exceeded", options.MaxRetries)
+			return
+		}
+
+		// use the delay from retry-after if available
+		delay := shared.RetryAfter(resp)
+		if delay <= 0 {
+			delay = calcDelay(options, try)
+		} else if delay > options.MaxRetryDelay {
+			// the retry-after delay exceeds the cap so don't retry
+			log.Writef(log.EventRetryPolicy, "Retry-After delay %s exceeds MaxRetryDelay of %s", delay, options.MaxRetryDelay)
+			return
+		}
+
+		// drain before retrying so nothing is leaked
+		Drain(resp)
+
+		log.Writef(log.EventRetryPolicy, "End Try #%d, Delay=%v", try, delay)
+		select {
+		case <-time.After(delay):
+			try++
+		case <-req.Raw().Context().Done():
+			err = req.Raw().Context().Err()
+			log.Writef(log.EventRetryPolicy, "abort due to %v", err)
+			return
+		}
+	}
+}
+
+// WithRetryOptions adds the specified RetryOptions to the parent context.
+// Use this to specify custom RetryOptions at the API-call level.
+// This is a thin forwarder kept for backward compatibility.
+// Deprecated: use [policy.WithRetryOptions] instead.
+func WithRetryOptions(parent context.Context, options policy.RetryOptions) context.Context {
+	return policy.WithRetryOptions(parent, options)
+}
+
+// ********** The following type/methods implement the retryableRequestBody (a ReadSeekCloser)
+
+// retryableRequestBody wraps the request body sent to the network so the retry
+// policy controls when the underlying stream is actually closed.
+type retryableRequestBody struct {
+	body io.ReadSeeker // Seeking is required to support retries
+}
+
+// Read forwards to the inner stream.
+func (r *retryableRequestBody) Read(p []byte) (int, error) {
+	return r.body.Read(p)
+}
+
+// Seek forwards to the inner stream, returning the new offset from the start.
+func (r *retryableRequestBody) Seek(offset int64, whence int) (int64, error) {
+	return r.body.Seek(offset, whence)
+}
+
+// Close is deliberately a no-op: the transport must not close the body on
+// transient failures. The retry policy closes it via realClose upon success.
+func (r *retryableRequestBody) Close() error {
+	return nil
+}
+
+// realClose closes the inner stream when it supports io.Closer.
+func (r *retryableRequestBody) realClose() error {
+	c, ok := r.body.(io.Closer)
+	if !ok {
+		return nil
+	}
+	return c.Close()
+}
+
+// ********** The following type/methods implement the contextCancelReadCloser
+
+// contextCancelReadCloser combines an io.ReadCloser with a cancel func.
+// it ensures the cancel func is invoked once the body has been read and closed.
+type contextCancelReadCloser struct {
+	cf   context.CancelFunc
+	body io.ReadCloser
+}
+
+// Read forwards to the wrapped body.
+func (c *contextCancelReadCloser) Read(p []byte) (int, error) {
+	return c.body.Read(p)
+}
+
+// Close closes the wrapped body, then invokes the cancel func.
+func (c *contextCancelReadCloser) Close() error {
+	defer c.cf()
+	return c.body.Close()
+}
@@ -0,0 +1,55 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+)
+
+// SASCredentialPolicy authorizes requests with a [azcore.SASCredential].
+type SASCredentialPolicy struct {
+	cred      *exported.SASCredential // may be nil; when nil no authorization header is added
+	header    string                  // name of the HTTP request header that carries the shared access signature
+	allowHTTP bool                    // when true, permit sending the signature over plain HTTP
+}
+
+// SASCredentialPolicyOptions contains the optional values configuring [SASCredentialPolicy].
+type SASCredentialPolicyOptions struct {
+	// InsecureAllowCredentialWithHTTP enables authenticated requests over HTTP.
+	// By default, authenticated requests to an HTTP endpoint are rejected by the client.
+	// WARNING: setting this to true will allow sending the authentication key in clear text. Use with caution.
+	InsecureAllowCredentialWithHTTP bool
+}
+
+// NewSASCredentialPolicy creates a new instance of [SASCredentialPolicy].
+//   - cred is the [azcore.SASCredential] used to authenticate with the service
+//   - header is the name of the HTTP request header in which the shared access signature is placed
+//   - options contains optional configuration, pass nil to accept the default values
+func NewSASCredentialPolicy(cred *exported.SASCredential, header string, options *SASCredentialPolicyOptions) *SASCredentialPolicy {
+	// normalize a nil options to the zero value so the field read below is safe
+	opts := SASCredentialPolicyOptions{}
+	if options != nil {
+		opts = *options
+	}
+	return &SASCredentialPolicy{
+		cred:      cred,
+		header:    header,
+		allowHTTP: opts.InsecureAllowCredentialWithHTTP,
+	}
+}
+
+// Do implements the Do method on the [policy.Policy] interface.
+func (k *SASCredentialPolicy) Do(req *policy.Request) (*http.Response, error) {
+	// skip adding the authorization header if no SASCredential was provided.
+	// this prevents a panic that might be hard to diagnose and allows testing
+	// against http endpoints that don't require authentication.
+	if k.cred != nil {
+		// reject authenticated requests over plain HTTP unless explicitly allowed
+		if err := checkHTTPSForAuth(req, k.allowHTTP); err != nil {
+			return nil, err
+		}
+		req.Raw().Header.Add(k.header, exported.SASCredentialGet(k.cred))
+	}
+	return req.Next()
+}
@@ -0,0 +1,83 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "bytes"
+ "fmt"
+ "net/http"
+ "os"
+ "runtime"
+ "strings"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+)
+
+// telemetryPolicy adds the telemetry string to the User-Agent header.
+type telemetryPolicy struct {
+	telemetryValue string // precomputed User-Agent fragment; empty when telemetry is disabled
+}
+
+// NewTelemetryPolicy creates a telemetry policy object that adds telemetry information to outgoing HTTP requests.
+// The format is [<application_id> ]azsdk-go-<mod>/<ver> <platform_info>.
+// Pass nil to accept the default values; this is the same as passing a zero-value options.
+func NewTelemetryPolicy(mod, ver string, o *policy.TelemetryOptions) policy.Policy {
+	if o == nil {
+		o = &policy.TelemetryOptions{}
+	}
+	tp := telemetryPolicy{}
+	if o.Disabled {
+		// telemetryValue stays empty so Do becomes a pass-through
+		return &tp
+	}
+	b := &bytes.Buffer{}
+	// normalize ApplicationID: spaces become '/', then truncate to 24 chars.
+	// NOTE: the replace must happen before the truncation.
+	if o.ApplicationID != "" {
+		o.ApplicationID = strings.ReplaceAll(o.ApplicationID, " ", "/")
+		if len(o.ApplicationID) > 24 {
+			o.ApplicationID = o.ApplicationID[:24]
+		}
+		b.WriteString(o.ApplicationID)
+		b.WriteRune(' ')
+	}
+	// mod might be the fully qualified name. in that case, we just want the package name
+	if i := strings.LastIndex(mod, "/"); i > -1 {
+		mod = mod[i+1:]
+	}
+	b.WriteString(formatTelemetry(mod, ver))
+	b.WriteRune(' ')
+	b.WriteString(platformInfo)
+	tp.telemetryValue = b.String()
+	return &tp
+}
+
+// formatTelemetry builds the "azsdk-go-<component>/<version>" telemetry fragment.
+func formatTelemetry(comp, ver string) string {
+	return "azsdk-go-" + comp + "/" + ver
+}
+
+// Do sets the User-Agent header to the precomputed telemetry value, appending
+// any User-Agent the caller already set. NOTE: p is a value receiver, so the
+// mutation of p.telemetryValue below is local to this call.
+func (p telemetryPolicy) Do(req *policy.Request) (*http.Response, error) {
+	if p.telemetryValue == "" {
+		return req.Next()
+	}
+	// preserve the existing User-Agent string
+	if ua := req.Raw().Header.Get(shared.HeaderUserAgent); ua != "" {
+		p.telemetryValue = fmt.Sprintf("%s %s", p.telemetryValue, ua)
+	}
+	req.Raw().Header.Set(shared.HeaderUserAgent, p.telemetryValue)
+	return req.Next()
+}
+
+// platformInfo holds "(<go version>; <os>)", computed once at package init.
+// NOTE: the ONLY function that should write to this variable is this func
+var platformInfo = func() string {
+	operatingSystem := runtime.GOOS // Default OS string
+	switch operatingSystem {
+	case "windows":
+		operatingSystem = os.Getenv("OS") // Get more specific OS information
+	case "linux": // accept default OS info
+	case "freebsd": //  accept default OS info
+	}
+	return fmt.Sprintf("(%s; %s)", runtime.Version(), operatingSystem)
+}()
@@ -0,0 +1,396 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "flag"
+ "fmt"
+ "net/http"
+ "reflect"
+ "strings"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/log"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/async"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/body"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/fake"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/loc"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/pollers/op"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/tracing"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/poller"
+)
+
+// FinalStateVia is the enumerated type for the possible final-state-via values.
+// It's aliased from the internal pollers package so both refer to the same type.
+type FinalStateVia = pollers.FinalStateVia
+
+const (
+	// FinalStateViaAzureAsyncOp indicates the final payload comes from the Azure-AsyncOperation URL.
+	FinalStateViaAzureAsyncOp = pollers.FinalStateViaAzureAsyncOp
+
+	// FinalStateViaLocation indicates the final payload comes from the Location URL.
+	FinalStateViaLocation = pollers.FinalStateViaLocation
+
+	// FinalStateViaOriginalURI indicates the final payload comes from the original URL.
+	FinalStateViaOriginalURI = pollers.FinalStateViaOriginalURI
+
+	// FinalStateViaOpLocation indicates the final payload comes from the Operation-Location URL.
+	FinalStateViaOpLocation = pollers.FinalStateViaOpLocation
+)
+
+// NewPollerOptions contains the optional parameters for NewPoller.
+type NewPollerOptions[T any] struct {
+	// FinalStateVia contains the final-state-via value for the LRO.
+	// NOTE: used only for Azure-AsyncOperation and Operation-Location LROs.
+	FinalStateVia FinalStateVia
+
+	// OperationLocationResultPath contains the JSON path to the result's
+	// payload when it's included with the terminal success response.
+	// NOTE: only used for Operation-Location LROs.
+	OperationLocationResultPath string
+
+	// Response contains a preconstructed response type.
+	// The final payload will be unmarshaled into it and returned.
+	Response *T
+
+	// Handler[T] contains a custom polling implementation.
+	// When set, it takes precedence over the automatic poller detection.
+	Handler PollingHandler[T]
+
+	// Tracer contains the Tracer from the client that's creating the Poller.
+	Tracer tracing.Tracer
+}
+
+// NewPoller creates a Poller based on the provided initial response.
+// The polling mechanism is selected by inspecting the response (fake, then
+// Azure-AsyncOperation, then Operation-Location, then Location, then body),
+// unless options.Handler supplies a custom implementation.
+func NewPoller[T any](resp *http.Response, pl exported.Pipeline, options *NewPollerOptions[T]) (*Poller[T], error) {
+	if options == nil {
+		options = &NewPollerOptions[T]{}
+	}
+	result := options.Response
+	if result == nil {
+		result = new(T)
+	}
+	if options.Handler != nil {
+		// a custom handler bypasses detection entirely
+		return &Poller[T]{
+			op:     options.Handler,
+			resp:   resp,
+			result: result,
+			tracer: options.Tracer,
+		}, nil
+	}
+
+	defer resp.Body.Close()
+	// this is a back-stop in case the swagger is incorrect (i.e. missing one or more status codes for success).
+	// ideally the codegen should return an error if the initial response failed and not even create a poller.
+	if !poller.StatusCodeValid(resp) {
+		return nil, errors.New("the operation failed or was cancelled")
+	}
+
+	// determine the polling method
+	var opr PollingHandler[T]
+	var err error
+	if fake.Applicable(resp) {
+		opr, err = fake.New[T](pl, resp)
+	} else if async.Applicable(resp) {
+		// async poller must be checked first as it can also have a location header
+		opr, err = async.New[T](pl, resp, options.FinalStateVia)
+	} else if op.Applicable(resp) {
+		// op poller must be checked before loc as it can also have a location header
+		opr, err = op.New[T](pl, resp, options.FinalStateVia, options.OperationLocationResultPath)
+	} else if loc.Applicable(resp) {
+		opr, err = loc.New[T](pl, resp)
+	} else if body.Applicable(resp) {
+		// must test body poller last as it's a subset of the other pollers.
+		// TODO: this is ambiguous for PATCH/PUT if it returns a 200 with no polling headers (sync completion)
+		opr, err = body.New[T](pl, resp)
+	} else if m := resp.Request.Method; resp.StatusCode == http.StatusAccepted && (m == http.MethodDelete || m == http.MethodPost) {
+		// if we get here it means we have a 202 with no polling headers.
+		// for DELETE and POST this is a hard error per ARM RPC spec.
+		return nil, errors.New("response is missing polling URL")
+	} else {
+		// no polling required; the nop poller treats the operation as already terminal
+		opr, err = pollers.NewNopPoller[T](resp)
+	}
+
+	if err != nil {
+		return nil, err
+	}
+	return &Poller[T]{
+		op:     opr,
+		resp:   resp,
+		result: result,
+		tracer: options.Tracer,
+	}, nil
+}
+
+// NewPollerFromResumeTokenOptions contains the optional parameters for NewPollerFromResumeToken.
+type NewPollerFromResumeTokenOptions[T any] struct {
+	// Response contains a preconstructed response type.
+	// The final payload will be unmarshaled into it and returned.
+	Response *T
+
+	// Handler[T] contains a custom polling implementation.
+	// Used when rehydrating a poller that was created with a custom handler.
+	Handler PollingHandler[T]
+
+	// Tracer contains the Tracer from the client that's creating the Poller.
+	Tracer tracing.Tracer
+}
+
+// NewPollerFromResumeToken creates a Poller from a resume token string.
+// The token is validated, its payload extracted, and the poller implementation
+// is rehydrated based on the encoded poller type (fake first, then any custom
+// handler, then async/body/loc/op).
+func NewPollerFromResumeToken[T any](token string, pl exported.Pipeline, options *NewPollerFromResumeTokenOptions[T]) (*Poller[T], error) {
+	if options == nil {
+		options = &NewPollerFromResumeTokenOptions[T]{}
+	}
+	result := options.Response
+	if result == nil {
+		result = new(T)
+	}
+
+	if err := pollers.IsTokenValid[T](token); err != nil {
+		return nil, err
+	}
+	raw, err := pollers.ExtractToken(token)
+	if err != nil {
+		return nil, err
+	}
+	var asJSON map[string]any
+	if err := json.Unmarshal(raw, &asJSON); err != nil {
+		return nil, err
+	}
+
+	opr := options.Handler
+	// now rehydrate the poller based on the encoded poller type
+	if fake.CanResume(asJSON) {
+		opr, _ = fake.New[T](pl, nil)
+	} else if opr != nil {
+		log.Writef(log.EventLRO, "Resuming custom poller %T.", opr)
+	} else if async.CanResume(asJSON) {
+		opr, _ = async.New[T](pl, nil, "")
+	} else if body.CanResume(asJSON) {
+		opr, _ = body.New[T](pl, nil)
+	} else if loc.CanResume(asJSON) {
+		opr, _ = loc.New[T](pl, nil)
+	} else if op.CanResume(asJSON) {
+		opr, _ = op.New[T](pl, nil, "", "")
+	} else {
+		return nil, fmt.Errorf("unhandled poller token %s", string(raw))
+	}
+	// restore the poller's saved state from the token payload
+	if err := json.Unmarshal(raw, &opr); err != nil {
+		return nil, err
+	}
+	return &Poller[T]{
+		op:     opr,
+		result: result,
+		tracer: options.Tracer,
+	}, nil
+}
+
+// PollingHandler[T] abstracts the differences among poller implementations.
+type PollingHandler[T any] interface {
+	// Done returns true if the LRO has reached a terminal state.
+	Done() bool
+
+	// Poll fetches the latest state of the LRO.
+	Poll(context.Context) (*http.Response, error)
+
+	// Result is called once the LRO has reached a terminal state. It populates the out parameter
+	// with the result of the operation.
+	Result(ctx context.Context, out *T) error
+}
+
+// Poller encapsulates a long-running operation, providing polling facilities until the operation reaches a terminal state.
+// Methods on this type are not safe for concurrent use.
+type Poller[T any] struct {
+	op     PollingHandler[T] // the underlying polling implementation
+	resp   *http.Response    // most recent polling response; updated by Poll
+	err    error
+	result *T // preconstructed result; the final payload is unmarshaled into it
+	tracer tracing.Tracer
+	done   bool
+}
+
+// PollUntilDoneOptions contains the optional values for the Poller[T].PollUntilDone() method.
+type PollUntilDoneOptions struct {
+	// Frequency is the time to wait between polling intervals in absence of a Retry-After header. Allowed minimum is one second.
+	// Pass zero to accept the default value (30s).
+	Frequency time.Duration
+}
+
+// PollUntilDone will poll the service endpoint until a terminal state is reached, an error is received, or the context expires.
+// It internally uses Poll(), Done(), and Result() in its polling loop, sleeping for the specified duration between intervals.
+// options: pass nil to accept the default values.
+// NOTE: the default polling frequency is 30 seconds which works well for most operations.  However, some operations might
+// benefit from a shorter or longer duration.
+func (p *Poller[T]) PollUntilDone(ctx context.Context, options *PollUntilDoneOptions) (res T, err error) {
+	if options == nil {
+		options = &PollUntilDoneOptions{}
+	}
+	// copy the options so the caller's struct is never mutated
+	cp := *options
+	if cp.Frequency == 0 {
+		cp.Frequency = 30 * time.Second
+	}
+
+	ctx, endSpan := StartSpan(ctx, fmt.Sprintf("%s.PollUntilDone", shortenTypeName(reflect.TypeOf(*p).Name())), p.tracer, nil)
+	defer func() { endSpan(err) }()
+
+	// skip the floor check when executing tests so they don't take so long
+	if isTest := flag.Lookup("test.v"); isTest == nil && cp.Frequency < time.Second {
+		err = errors.New("polling frequency minimum is one second")
+		return
+	}
+
+	start := time.Now()
+	logPollUntilDoneExit := func(v any) {
+		log.Writef(log.EventLRO, "END PollUntilDone() for %T: %v, total time: %s", p.op, v, time.Since(start))
+	}
+	log.Writef(log.EventLRO, "BEGIN PollUntilDone() for %T", p.op)
+	if p.resp != nil {
+		// initial check for a retry-after header existing on the initial response
+		if retryAfter := shared.RetryAfter(p.resp); retryAfter > 0 {
+			log.Writef(log.EventLRO, "initial Retry-After delay for %s", retryAfter.String())
+			if err = shared.Delay(ctx, retryAfter); err != nil {
+				logPollUntilDoneExit(err)
+				return
+			}
+		}
+	}
+	// begin polling the endpoint until a terminal state is reached
+	for {
+		var resp *http.Response
+		resp, err = p.Poll(ctx)
+		if err != nil {
+			logPollUntilDoneExit(err)
+			return
+		}
+		if p.Done() {
+			logPollUntilDoneExit("succeeded")
+			res, err = p.Result(ctx)
+			return
+		}
+		// a Retry-After header on the polling response overrides the configured frequency
+		d := cp.Frequency
+		if retryAfter := shared.RetryAfter(resp); retryAfter > 0 {
+			log.Writef(log.EventLRO, "Retry-After delay for %s", retryAfter.String())
+			d = retryAfter
+		} else {
+			log.Writef(log.EventLRO, "delay for %s", d.String())
+		}
+		if err = shared.Delay(ctx, d); err != nil {
+			logPollUntilDoneExit(err)
+			return
+		}
+	}
+}
+
// Poll fetches the latest state of the LRO. It returns an HTTP response or error.
// If Poll succeeds, the poller's state is updated and the HTTP response is returned.
// If Poll fails, the poller's state is unmodified and the error is returned.
// Calling Poll on an LRO that has reached a terminal state will return the last HTTP response.
func (p *Poller[T]) Poll(ctx context.Context) (resp *http.Response, err error) {
	if p.Done() {
		// the LRO has reached a terminal state, don't poll again
		resp = p.resp
		return
	}

	ctx, endSpan := StartSpan(ctx, fmt.Sprintf("%s.Poll", shortenTypeName(reflect.TypeOf(*p).Name())), p.tracer, nil)
	defer func() { endSpan(err) }()

	// delegate to the underlying operation; only cache the response on success
	resp, err = p.op.Poll(ctx)
	if err != nil {
		return
	}
	p.resp = resp
	return
}

// Done returns true if the LRO has reached a terminal state.
// Once a terminal state is reached, call Result().
func (p *Poller[T]) Done() bool {
	return p.op.Done()
}
+
// Result returns the result of the LRO and is meant to be used in conjunction with Poll and Done.
// If the LRO completed successfully, a populated instance of T is returned.
// If the LRO failed or was canceled, an *azcore.ResponseError error is returned.
// Calling this on an LRO in a non-terminal state will return an error.
// The outcome (value or error) is cached after the first successful retrieval;
// subsequent calls return the cached result without contacting the service.
func (p *Poller[T]) Result(ctx context.Context) (res T, err error) {
	if !p.Done() {
		err = errors.New("poller is in a non-terminal state")
		return
	}
	if p.done {
		// the result has already been retrieved, return the cached value
		if p.err != nil {
			err = p.err
			return
		}
		res = *p.result
		return
	}

	ctx, endSpan := StartSpan(ctx, fmt.Sprintf("%s.Result", shortenTypeName(reflect.TypeOf(*p).Name())), p.tracer, nil)
	defer func() { endSpan(err) }()

	err = p.op.Result(ctx, p.result)
	var respErr *exported.ResponseError
	if errors.As(err, &respErr) {
		if pollers.IsNonTerminalHTTPStatusCode(respErr.RawResponse) {
			// the request failed in a non-terminal way.
			// don't cache the error or mark the Poller as done
			return
		}
		// the LRO failed. record the error
		p.err = err
	} else if err != nil {
		// the call to Result failed, don't cache anything in this case
		return
	}
	// mark the poller done; from here on the cached value/error above is authoritative
	p.done = true
	if p.err != nil {
		err = p.err
		return
	}
	res = *p.result
	return
}
+
+// ResumeToken returns a value representing the poller that can be used to resume
+// the LRO at a later time. ResumeTokens are unique per service operation.
+// The token's format should be considered opaque and is subject to change.
+// Calling this on an LRO in a terminal state will return an error.
+func (p *Poller[T]) ResumeToken() (string, error) {
+ if p.Done() {
+ return "", errors.New("poller is in a terminal state")
+ }
+ tk, err := pollers.NewResumeToken[T](p.op)
+ if err != nil {
+ return "", err
+ }
+ return tk, err
+}
+
// shortenTypeName extracts the simple type name from the string returned from reflect.Value.Name().
// The value is formatted as follows
//   Poller[module/Package.Type].Method
// we want to shorten the generic type parameter string to Type.
// Anything we don't recognize will be left as-is.
func shortenTypeName(s string) string {
	begin := strings.Index(s, "[")
	end := strings.Index(s, "]")
	// require a well-formed "[...]" pair: a missing bracket, or a "]" that
	// precedes the "[" (end < begin), would otherwise cause an invalid
	// slice expression (panic) below
	if begin == -1 || end == -1 || end < begin {
		return s
	}

	typeName := s[begin+1 : end]
	if i := strings.LastIndex(typeName, "."); i > -1 {
		typeName = typeName[i+1:]
	}
	return s[:begin+1] + typeName + s[end:]
}
@@ -0,0 +1,281 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "io"
+ "mime/multipart"
+ "net/http"
+ "net/textproto"
+ "net/url"
+ "path"
+ "strings"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/shared"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/streaming"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/uuid"
+)
+
// Base64Encoding is used to specify which base-64 encoder/decoder to use when
// encoding/decoding a slice of bytes to/from a string.
type Base64Encoding = exported.Base64Encoding

const (
	// Base64StdFormat uses base64.StdEncoding for encoding and decoding payloads.
	Base64StdFormat Base64Encoding = exported.Base64StdFormat

	// Base64URLFormat uses base64.RawURLEncoding for encoding and decoding payloads.
	Base64URLFormat Base64Encoding = exported.Base64URLFormat
)
+
// NewRequest creates a new policy.Request with the specified input.
// The endpoint MUST be properly encoded before calling this function.
// Delegates to the internal exported implementation.
func NewRequest(ctx context.Context, httpMethod string, endpoint string) (*policy.Request, error) {
	return exported.NewRequest(ctx, httpMethod, endpoint)
}

// NewRequestFromRequest creates a new policy.Request with an existing *http.Request.
// Delegates to the internal exported implementation.
func NewRequestFromRequest(req *http.Request) (*policy.Request, error) {
	return exported.NewRequestFromRequest(req)
}
+
// EncodeQueryParams will parse and encode any query parameters in the specified URL.
// Any semicolons will automatically be escaped.
func EncodeQueryParams(u string) (string, error) {
	prefix, rawQuery, hasQuery := strings.Cut(u, "?")
	if !hasQuery {
		// no query string present; nothing to encode
		return u, nil
	}
	// starting in Go 1.17, url.ParseQuery will reject semicolons in query params.
	// so, we must escape them first. note that this assumes that semicolons aren't
	// being used as query param separators which is per the current RFC.
	// for more info:
	// https://github.com/golang/go/issues/25192
	// https://github.com/golang/go/issues/50034
	escaped := strings.ReplaceAll(rawQuery, ";", "%3B")
	values, err := url.ParseQuery(escaped)
	if err != nil {
		return "", err
	}
	return prefix + "?" + values.Encode(), nil
}
+
// JoinPaths concatenates multiple URL path segments into one path,
// inserting path separation characters as required. JoinPaths will preserve
// query parameters in the root path
func JoinPaths(root string, paths ...string) string {
	if len(paths) == 0 {
		return root
	}

	// detach any query string from root so it can be re-appended after joining
	qs := ""
	if strings.Contains(root, "?") {
		parts := strings.Split(root, "?")
		root, qs = parts[0], parts[1]
	}

	joined := path.Join(paths...)
	// path.Join strips trailing slashes; restore one if the caller provided it
	// on the final segment
	if strings.HasSuffix(paths[len(paths)-1], "/") && !strings.HasSuffix(joined, "/") {
		joined += "/"
	}

	if qs != "" {
		joined += "?" + qs
	}

	// ensure exactly one separator between root and the joined segments
	switch {
	case strings.HasSuffix(root, "/") && strings.HasPrefix(joined, "/"):
		root = strings.TrimSuffix(root, "/")
	case !strings.HasSuffix(root, "/") && !strings.HasPrefix(joined, "/"):
		joined = "/" + joined
	}
	return root + joined
}
+
// EncodeByteArray will base-64 encode the byte slice v using the specified encoding.
func EncodeByteArray(v []byte, format Base64Encoding) string {
	return exported.EncodeByteArray(v, format)
}

// MarshalAsByteArray will base-64 encode the byte slice v, then calls SetBody.
// The encoded value is treated as a JSON string.
func MarshalAsByteArray(req *policy.Request, v []byte, format Base64Encoding) error {
	// send as a JSON string, i.e. wrap the encoded value in double quotes
	encode := fmt.Sprintf("\"%s\"", EncodeByteArray(v, format))
	// tsp generated code can set Content-Type so we must prefer that
	return exported.SetBody(req, exported.NopCloser(strings.NewReader(encode)), shared.ContentTypeAppJSON, false)
}
+
// MarshalAsJSON calls json.Marshal() to get the JSON encoding of v then calls SetBody.
// The request body's Content-Type is set to application/json unless already set by generated code.
func MarshalAsJSON(req *policy.Request, v any) error {
	b, err := json.Marshal(v)
	if err != nil {
		return fmt.Errorf("error marshalling type %T: %s", v, err)
	}
	// tsp generated code can set Content-Type so we must prefer that
	return exported.SetBody(req, exported.NopCloser(bytes.NewReader(b)), shared.ContentTypeAppJSON, false)
}

// MarshalAsXML calls xml.Marshal() to get the XML encoding of v then calls SetBody.
func MarshalAsXML(req *policy.Request, v any) error {
	b, err := xml.Marshal(v)
	if err != nil {
		return fmt.Errorf("error marshalling type %T: %s", v, err)
	}
	// include the XML header as some services require it
	b = []byte(xml.Header + string(b))
	return req.SetBody(exported.NopCloser(bytes.NewReader(b)), shared.ContentTypeAppXML)
}
+
// SetMultipartFormData writes the specified keys/values as multi-part form fields with the specified value.
// File content must be specified as an [io.ReadSeekCloser] or [streaming.MultipartContent].
// Byte slices will be treated as JSON. All other values are treated as string values.
// Slices of [io.ReadSeekCloser] or [streaming.MultipartContent] produce one form part per element,
// all sharing the same field name.
func SetMultipartFormData(req *policy.Request, formData map[string]any) error {
	body := bytes.Buffer{}
	writer := multipart.NewWriter(&body)

	// writeContent writes a single form-file part using the writer's default
	// Content-Type (application/octet-stream).
	writeContent := func(fieldname, filename string, src io.Reader) error {
		fd, err := writer.CreateFormFile(fieldname, filename)
		if err != nil {
			return err
		}
		// copy the data to the form file
		if _, err = io.Copy(fd, src); err != nil {
			return err
		}
		return nil
	}

	// escapes backslashes and double quotes for embedding in a quoted-string header value
	quoteEscaper := strings.NewReplacer("\\", "\\\\", `"`, "\\\"")

	// writeMultipartContent writes one form-file part honoring the caller-provided
	// Content-Type and filename from the MultipartContent.
	writeMultipartContent := func(fieldname string, mpc streaming.MultipartContent) error {
		if mpc.Body == nil {
			return errors.New("streaming.MultipartContent.Body cannot be nil")
		}

		// use fieldname for the file name when unspecified
		filename := fieldname

		if mpc.ContentType == "" && mpc.Filename == "" {
			// nothing custom requested; fall back to the default form-file writer
			return writeContent(fieldname, filename, mpc.Body)
		}
		if mpc.Filename != "" {
			filename = mpc.Filename
		}
		// this is pretty much copied from multipart.Writer.CreateFormFile
		// but lets us set the caller provided Content-Type and filename
		h := make(textproto.MIMEHeader)
		h.Set("Content-Disposition",
			fmt.Sprintf(`form-data; name="%s"; filename="%s"`,
				quoteEscaper.Replace(fieldname), quoteEscaper.Replace(filename)))
		contentType := "application/octet-stream"
		if mpc.ContentType != "" {
			contentType = mpc.ContentType
		}
		h.Set("Content-Type", contentType)
		fd, err := writer.CreatePart(h)
		if err != nil {
			return err
		}
		// copy the data to the form file
		if _, err = io.Copy(fd, mpc.Body); err != nil {
			return err
		}
		return nil
	}

	// the same as multipart.Writer.WriteField but lets us specify the Content-Type
	writeField := func(fieldname, contentType string, value string) error {
		h := make(textproto.MIMEHeader)
		h.Set("Content-Disposition",
			fmt.Sprintf(`form-data; name="%s"`, quoteEscaper.Replace(fieldname)))
		h.Set("Content-Type", contentType)
		fd, err := writer.CreatePart(h)
		if err != nil {
			return err
		}
		if _, err = fd.Write([]byte(value)); err != nil {
			return err
		}
		return nil
	}

	// dispatch each entry by dynamic type: stream types become file parts,
	// everything else becomes a simple field
	for k, v := range formData {
		if rsc, ok := v.(io.ReadSeekCloser); ok {
			if err := writeContent(k, k, rsc); err != nil {
				return err
			}
			continue
		} else if rscs, ok := v.([]io.ReadSeekCloser); ok {
			for _, rsc := range rscs {
				if err := writeContent(k, k, rsc); err != nil {
					return err
				}
			}
			continue
		} else if mpc, ok := v.(streaming.MultipartContent); ok {
			if err := writeMultipartContent(k, mpc); err != nil {
				return err
			}
			continue
		} else if mpcs, ok := v.([]streaming.MultipartContent); ok {
			for _, mpc := range mpcs {
				if err := writeMultipartContent(k, mpc); err != nil {
					return err
				}
			}
			continue
		}

		var content string
		contentType := shared.ContentTypeTextPlain
		switch tt := v.(type) {
		case []byte:
			// JSON, don't quote it
			content = string(tt)
			contentType = shared.ContentTypeAppJSON
		case string:
			content = tt
		default:
			// ensure the value is in string format
			content = fmt.Sprintf("%v", v)
		}

		if err := writeField(k, contentType, content); err != nil {
			return err
		}
	}
	// Close finalizes the multipart body (writes the trailing boundary)
	if err := writer.Close(); err != nil {
		return err
	}
	return req.SetBody(exported.NopCloser(bytes.NewReader(body.Bytes())), writer.FormDataContentType())
}
+
// SkipBodyDownload will disable automatic downloading of the response body.
// It sets a per-operation flag consumed by the body-download policy.
func SkipBodyDownload(req *policy.Request) {
	req.SetOperationValue(bodyDownloadPolicyOpValues{Skip: true})
}

// CtxAPINameKey is used as a context key for adding/retrieving the API name.
type CtxAPINameKey = shared.CtxAPINameKey

// NewUUID returns a new UUID using the RFC4122 algorithm.
func NewUUID() (string, error) {
	u, err := uuid.New()
	if err != nil {
		return "", err
	}
	return u.String(), nil
}
@@ -0,0 +1,109 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "bytes"
+ "encoding/json"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "net/http"
+
+ azexported "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/exported"
+)
+
// Payload reads and returns the response body or an error.
// On a successful read, the response body is cached.
// Subsequent reads will access the cached value.
func Payload(resp *http.Response) ([]byte, error) {
	return exported.Payload(resp, nil)
}

// HasStatusCode returns true if the Response's status code is one of the specified values.
func HasStatusCode(resp *http.Response, statusCodes ...int) bool {
	return exported.HasStatusCode(resp, statusCodes...)
}
+
+// UnmarshalAsByteArray will base-64 decode the received payload and place the result into the value pointed to by v.
+func UnmarshalAsByteArray(resp *http.Response, v *[]byte, format Base64Encoding) error {
+ p, err := Payload(resp)
+ if err != nil {
+ return err
+ }
+ return DecodeByteArray(string(p), v, format)
+}
+
+// UnmarshalAsJSON calls json.Unmarshal() to unmarshal the received payload into the value pointed to by v.
+func UnmarshalAsJSON(resp *http.Response, v any) error {
+ payload, err := Payload(resp)
+ if err != nil {
+ return err
+ }
+ // TODO: verify early exit is correct
+ if len(payload) == 0 {
+ return nil
+ }
+ err = removeBOM(resp)
+ if err != nil {
+ return err
+ }
+ err = json.Unmarshal(payload, v)
+ if err != nil {
+ err = fmt.Errorf("unmarshalling type %T: %s", v, err)
+ }
+ return err
+}
+
+// UnmarshalAsXML calls xml.Unmarshal() to unmarshal the received payload into the value pointed to by v.
+func UnmarshalAsXML(resp *http.Response, v any) error {
+ payload, err := Payload(resp)
+ if err != nil {
+ return err
+ }
+ // TODO: verify early exit is correct
+ if len(payload) == 0 {
+ return nil
+ }
+ err = removeBOM(resp)
+ if err != nil {
+ return err
+ }
+ err = xml.Unmarshal(payload, v)
+ if err != nil {
+ err = fmt.Errorf("unmarshalling type %T: %s", v, err)
+ }
+ return err
+}
+
+// Drain reads the response body to completion then closes it. The bytes read are discarded.
+func Drain(resp *http.Response) {
+ if resp != nil && resp.Body != nil {
+ _, _ = io.Copy(io.Discard, resp.Body)
+ resp.Body.Close()
+ }
+}
+
// removeBOM removes any byte-order mark prefix from the payload if present.
// It rewrites the cached response body in place via the payload BytesModifier;
// callers must re-read the payload afterwards to observe the stripped bytes.
func removeBOM(resp *http.Response) error {
	_, err := exported.Payload(resp, &exported.PayloadOptions{
		BytesModifier: func(b []byte) []byte {
			// UTF8 byte-order mark (EF BB BF)
			return bytes.TrimPrefix(b, []byte("\xef\xbb\xbf"))
		},
	})
	if err != nil {
		return err
	}
	return nil
}

// DecodeByteArray will base-64 decode the provided string into v using the specified encoding.
func DecodeByteArray(s string, v *[]byte, format Base64Encoding) error {
	return azexported.DecodeByteArray(s, v, format)
}
@@ -0,0 +1,15 @@
+//go:build !wasm
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "net"
+)
+
// defaultTransportDialContext returns the DialContext func to use for the default
// HTTP transport. On non-wasm platforms this is the dialer's own DialContext.
func defaultTransportDialContext(dialer *net.Dialer) func(context.Context, string, string) (net.Conn, error) {
	return dialer.DialContext
}
@@ -0,0 +1,15 @@
+//go:build (js && wasm) || wasip1
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "context"
+ "net"
+)
+
// defaultTransportDialContext returns the DialContext func to use for the default
// HTTP transport. On js/wasm and wasip1 this returns nil so the transport falls
// back to the runtime-provided networking.
func defaultTransportDialContext(dialer *net.Dialer) func(context.Context, string, string) (net.Conn, error) {
	return nil
}
@@ -0,0 +1,48 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package runtime
+
+import (
+ "crypto/tls"
+ "net"
+ "net/http"
+ "time"
+
+ "golang.org/x/net/http2"
+)
+
// defaultHTTPClient is the shared client used when the caller doesn't supply a transport.
var defaultHTTPClient *http.Client

func init() {
	defaultTransport := &http.Transport{
		Proxy: http.ProxyFromEnvironment,
		DialContext: defaultTransportDialContext(&net.Dialer{
			Timeout:   30 * time.Second,
			KeepAlive: 30 * time.Second,
		}),
		ForceAttemptHTTP2:     true,
		MaxIdleConns:          100,
		MaxIdleConnsPerHost:   10,
		IdleConnTimeout:       90 * time.Second,
		TLSHandshakeTimeout:   10 * time.Second,
		ExpectContinueTimeout: 1 * time.Second,
		TLSClientConfig: &tls.Config{
			// require TLS 1.2 or better
			MinVersion:    tls.VersionTLS12,
			Renegotiation: tls.RenegotiateFreelyAsClient,
		},
	}
	// TODO: evaluate removing this once https://github.com/golang/go/issues/59690 has been fixed
	if http2Transport, err := http2.ConfigureTransports(defaultTransport); err == nil {
		// if the connection has been idle for 10 seconds, send a ping frame for a health check
		http2Transport.ReadIdleTimeout = 10 * time.Second
		// if there's no response to the ping within the timeout, the connection will be closed
		http2Transport.PingTimeout = 5 * time.Second
	}
	defaultHTTPClient = &http.Client{
		Transport: defaultTransport,
	}
}
@@ -0,0 +1,9 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright 2017 Microsoft Corporation. All rights reserved.
+// Use of this source code is governed by an MIT
+// license that can be found in the LICENSE file.
+
+// Package streaming contains helpers for streaming IO operations and progress reporting.
+package streaming
@@ -0,0 +1,89 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package streaming
+
+import (
+ "io"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/internal/exported"
+)
+
// progress wraps a body stream and reports the cumulative number of bytes
// transferred to a caller-supplied callback on every Read.
type progress struct {
	rc  io.ReadCloser     // the stream being read (always set)
	rsc io.ReadSeekCloser // same stream when seekable (request bodies); nil for responses
	pr  func(bytesTransferred int64)
	// offset is the running count of bytes read so far
	offset int64
}
+
// NopCloser returns a ReadSeekCloser with a no-op close method wrapping the provided io.ReadSeeker.
// In addition to adding a Close method to an io.ReadSeeker, this can also be used to wrap an
// io.ReadSeekCloser with a no-op Close method to allow explicit control of when the io.ReadSeekCloser
// has its underlying stream closed.
func NopCloser(rs io.ReadSeeker) io.ReadSeekCloser {
	return exported.NopCloser(rs)
}

// NewRequestProgress adds progress reporting to an HTTP request's body stream.
// pr is invoked after each Read with the total number of bytes transferred so far.
func NewRequestProgress(body io.ReadSeekCloser, pr func(bytesTransferred int64)) io.ReadSeekCloser {
	return &progress{
		rc:     body,
		rsc:    body,
		pr:     pr,
		offset: 0,
	}
}

// NewResponseProgress adds progress reporting to an HTTP response's body stream.
// pr is invoked after each Read with the total number of bytes transferred so far.
func NewResponseProgress(body io.ReadCloser, pr func(bytesTransferred int64)) io.ReadCloser {
	return &progress{
		rc:     body,
		rsc:    nil, // response bodies are not seekable
		pr:     pr,
		offset: 0,
	}
}
+
// Read reads a block of data from an inner stream and reports progress.
// On io.EOF the bytes read (possibly zero) are still counted and reported
// before the EOF is returned to the caller.
func (p *progress) Read(b []byte) (n int, err error) {
	n, err = p.rc.Read(b)
	if err != nil && err != io.EOF {
		return
	}
	p.offset += int64(n)
	// Invokes the user's callback method to report progress
	p.pr(p.offset)
	return
}
+
+// Seek only expects a zero or from beginning.
+func (p *progress) Seek(offset int64, whence int) (int64, error) {
+ // This should only ever be called with offset = 0 and whence = io.SeekStart
+ n, err := p.rsc.Seek(offset, whence)
+ if err == nil {
+ p.offset = int64(n)
+ }
+ return n, err
+}
+
// Close closes the wrapped stream. The progress wrapper always supports Close,
// delegating to the underlying stream's Close method.
func (p *progress) Close() error {
	return p.rc.Close()
}
+
// MultipartContent contains streaming content used in multipart/form payloads.
type MultipartContent struct {
	// Body contains the required content body.
	Body io.ReadSeekCloser

	// ContentType optionally specifies the HTTP Content-Type for this Body.
	// The default value is application/octet-stream.
	ContentType string

	// Filename optionally specifies the filename for this Body.
	// The default value is the field name for the multipart/form section.
	Filename string
}
@@ -0,0 +1,41 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package tracing
+
// SpanKind represents the role of a Span inside a Trace. Often, this defines how a Span will be processed and visualized by various backends.
type SpanKind int

const (
	// SpanKindInternal indicates the span represents an internal operation within an application.
	SpanKindInternal SpanKind = 1

	// SpanKindServer indicates the span covers server-side handling of a request.
	SpanKindServer SpanKind = 2

	// SpanKindClient indicates the span describes a request to a remote service.
	SpanKindClient SpanKind = 3

	// SpanKindProducer indicates the span was created by a messaging producer.
	SpanKindProducer SpanKind = 4

	// SpanKindConsumer indicates the span was created by a messaging consumer.
	SpanKindConsumer SpanKind = 5
)

// SpanStatus represents the status of a span.
type SpanStatus int

const (
	// SpanStatusUnset is the default status code.
	SpanStatusUnset SpanStatus = 0

	// SpanStatusError indicates the operation contains an error.
	SpanStatusError SpanStatus = 1

	// SpanStatusOK indicates the operation completed successfully.
	SpanStatusOK SpanStatus = 2
)
@@ -0,0 +1,191 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// Package tracing contains the definitions needed to support distributed tracing.
+package tracing
+
+import (
+ "context"
+)
+
// ProviderOptions contains the optional values when creating a Provider.
type ProviderOptions struct {
	// for future expansion
}

// NewProvider creates a new Provider with the specified values.
//   - newTracerFn is the underlying implementation for creating Tracer instances
//   - options contains optional values; pass nil to accept the default value
//
// NOTE: options is currently unused; it exists for future expansion.
func NewProvider(newTracerFn func(name, version string) Tracer, options *ProviderOptions) Provider {
	return Provider{
		newTracerFn: newTracerFn,
	}
}
+
// Provider is the factory that creates Tracer instances.
// It defaults to a no-op provider.
type Provider struct {
	// newTracerFn is nil for the zero-value (no-op) Provider
	newTracerFn func(name, version string) Tracer
}

// NewTracer creates a new Tracer for the specified module name and version.
//   - module - the fully qualified name of the module
//   - version - the version of the module
//
// For a zero-value Provider this returns the zero-value (no-op) Tracer.
func (p Provider) NewTracer(module, version string) (tracer Tracer) {
	if p.newTracerFn != nil {
		tracer = p.newTracerFn(module, version)
	}
	return
}
+
+/////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
// TracerOptions contains the optional values when creating a Tracer.
type TracerOptions struct {
	// SpanFromContext contains the implementation for the Tracer.SpanFromContext method.
	SpanFromContext func(context.Context) Span
}

// NewTracer creates a Tracer with the specified values.
//   - newSpanFn is the underlying implementation for creating Span instances
//   - options contains optional values; pass nil to accept the default value
func NewTracer(newSpanFn func(ctx context.Context, spanName string, options *SpanOptions) (context.Context, Span), options *TracerOptions) Tracer {
	if options == nil {
		options = &TracerOptions{}
	}
	return Tracer{
		newSpanFn:         newSpanFn,
		spanFromContextFn: options.SpanFromContext,
	}
}
+
// Tracer is the factory that creates Span instances.
// The zero value is a no-op Tracer.
type Tracer struct {
	// attrs are merged into the options of every span created via Start
	attrs             []Attribute
	newSpanFn         func(ctx context.Context, spanName string, options *SpanOptions) (context.Context, Span)
	spanFromContextFn func(ctx context.Context) Span
}
+
+// Start creates a new span and a context.Context that contains it.
+// - ctx is the parent context for this span. If it contains a Span, the newly created span will be a child of that span, else it will be a root span
+// - spanName identifies the span within a trace, it's typically the fully qualified API name
+// - options contains optional values for the span, pass nil to accept any defaults
+func (t Tracer) Start(ctx context.Context, spanName string, options *SpanOptions) (context.Context, Span) {
+ if t.newSpanFn != nil {
+ opts := SpanOptions{}
+ if options != nil {
+ opts = *options
+ }
+ opts.Attributes = append(opts.Attributes, t.attrs...)
+ return t.newSpanFn(ctx, spanName, &opts)
+ }
+ return ctx, Span{}
+}
+
// SetAttributes sets attrs to be applied to each Span. If a key from attrs
// already exists for an attribute of the Span it will be overwritten with
// the value contained in attrs.
// Note the pointer receiver: the addition is visible only through this Tracer value.
func (t *Tracer) SetAttributes(attrs ...Attribute) {
	t.attrs = append(t.attrs, attrs...)
}

// Enabled returns true if this Tracer is capable of creating Spans.
func (t Tracer) Enabled() bool {
	return t.newSpanFn != nil
}
+
// SpanFromContext returns the Span associated with the current context.
// If the provided context has no Span, or this Tracer has no SpanFromContext
// implementation, a zero-value (no-op) Span is returned.
func (t Tracer) SpanFromContext(ctx context.Context) Span {
	if t.spanFromContextFn != nil {
		return t.spanFromContextFn(ctx)
	}
	return Span{}
}
+
// SpanOptions contains optional settings for creating a span.
type SpanOptions struct {
	// Kind indicates the kind of Span.
	Kind SpanKind

	// Attributes contains key-value pairs of attributes for the span.
	Attributes []Attribute
}
+
+/////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
// SpanImpl abstracts the underlying implementation for Span,
// allowing it to work with various tracing implementations.
// Any zero-values will have their default, no-op behavior.
type SpanImpl struct {
	// End contains the implementation for the Span.End method.
	End func()

	// SetAttributes contains the implementation for the Span.SetAttributes method.
	SetAttributes func(...Attribute)

	// AddEvent contains the implementation for the Span.AddEvent method.
	AddEvent func(string, ...Attribute)

	// SetStatus contains the implementation for the Span.SetStatus method.
	SetStatus func(SpanStatus, string)
}
+
// NewSpan creates a Span with the specified implementation.
// Any nil funcs in impl retain their no-op behavior.
func NewSpan(impl SpanImpl) Span {
	return Span{
		impl: impl,
	}
}

// Span is a single unit of a trace. A trace can contain multiple spans.
// A zero-value Span provides a no-op implementation.
type Span struct {
	impl SpanImpl
}
+
// End terminates the span and MUST be called before the span leaves scope.
// Any further updates to the span will be ignored after End is called.
// No-op when the underlying implementation is unset.
func (s Span) End() {
	if s.impl.End != nil {
		s.impl.End()
	}
}

// SetAttributes sets the specified attributes on the Span.
// Any existing attributes with the same keys will have their values overwritten.
// No-op when the underlying implementation is unset.
func (s Span) SetAttributes(attrs ...Attribute) {
	if s.impl.SetAttributes != nil {
		s.impl.SetAttributes(attrs...)
	}
}

// AddEvent adds a named event with an optional set of attributes to the span.
// No-op when the underlying implementation is unset.
func (s Span) AddEvent(name string, attrs ...Attribute) {
	if s.impl.AddEvent != nil {
		s.impl.AddEvent(name, attrs...)
	}
}

// SetStatus sets the status on the span along with a description.
// No-op when the underlying implementation is unset.
func (s Span) SetStatus(code SpanStatus, desc string) {
	if s.impl.SetStatus != nil {
		s.impl.SetStatus(code, desc)
	}
}
+
+/////////////////////////////////////////////////////////////////////////////////////////////////////////////
+
// Attribute is a key-value pair.
type Attribute struct {
	// Key is the name of the attribute.
	Key string

	// Value is the attribute's value.
	// Types that are natively supported include int64, float64, int, bool, string.
	// Any other type will be formatted per rules of fmt.Sprintf("%v").
	Value any
}
@@ -0,0 +1,4 @@
+# live test artifacts
+Dockerfile
+k8s.yaml
+sshkey*
@@ -0,0 +1,575 @@
+# Release History
+
+## 1.7.0 (2024-06-20)
+
+### Features Added
+* `AzurePipelinesCredential` authenticates an Azure Pipelines service connection with
+ workload identity federation
+
+### Breaking Changes
+> These changes affect only code written against a beta version such as v1.7.0-beta.1
+* Removed the persistent token caching API. It will return in v1.8.0-beta.1
+
+## 1.7.0-beta.1 (2024-06-10)
+
+### Features Added
+* Restored `AzurePipelinesCredential` and persistent token caching API
+
+### Breaking Changes
+> These changes affect only code written against a beta version such as v1.6.0-beta.4
+* Values which `NewAzurePipelinesCredential` read from environment variables in
+ prior versions are now parameters
+* Renamed `AzurePipelinesServiceConnectionCredentialOptions` to `AzurePipelinesCredentialOptions`
+
+### Bugs Fixed
+* Managed identity bug fixes
+
+## 1.6.0 (2024-06-10)
+
+### Features Added
+* `NewOnBehalfOfCredentialWithClientAssertions` creates an on-behalf-of credential
+ that authenticates with client assertions such as federated credentials
+
+### Breaking Changes
+> These changes affect only code written against a beta version such as v1.6.0-beta.4
+* Removed `AzurePipelinesCredential` and the persistent token caching API.
+ They will return in v1.7.0-beta.1
+
+### Bugs Fixed
+* Managed identity bug fixes
+
+## 1.6.0-beta.4 (2024-05-14)
+
+### Features Added
+* `AzurePipelinesCredential` authenticates an Azure Pipeline service connection with
+ workload identity federation
+
+## 1.6.0-beta.3 (2024-04-09)
+
+### Breaking Changes
+* `DefaultAzureCredential` now sends a probe request with no retries for IMDS managed identity
+ environments to avoid excessive retry delays when the IMDS endpoint is not available. This
+ should improve credential chain resolution for local development scenarios.
+
+### Bugs Fixed
+* `ManagedIdentityCredential` now specifies resource IDs correctly for Azure Container Instances
+
+## 1.5.2 (2024-04-09)
+
+### Bugs Fixed
+* `ManagedIdentityCredential` now specifies resource IDs correctly for Azure Container Instances
+
+### Other Changes
+* Restored v1.4.0 error behavior for empty tenant IDs
+* Upgraded dependencies
+
+## 1.6.0-beta.2 (2024-02-06)
+
+### Breaking Changes
+> These changes affect only code written against a beta version such as v1.6.0-beta.1
+* Replaced `ErrAuthenticationRequired` with `AuthenticationRequiredError`, a struct
+ type that carries the `TokenRequestOptions` passed to the `GetToken` call which
+ returned the error.
+
+### Bugs Fixed
+* Fixed more cases in which credential chains like `DefaultAzureCredential`
+ should try their next credential after attempting managed identity
+ authentication in a Docker Desktop container
+
+### Other Changes
+* `AzureCLICredential` uses the CLI's `expires_on` value for token expiration
+
+## 1.6.0-beta.1 (2024-01-17)
+
+### Features Added
+* Restored persistent token caching API first added in v1.5.0-beta.1
+* Added `AzureCLICredentialOptions.Subscription`
+
+## 1.5.1 (2024-01-17)
+
+### Bugs Fixed
+* `InteractiveBrowserCredential` handles `AdditionallyAllowedTenants` correctly
+
+## 1.5.0 (2024-01-16)
+
+### Breaking Changes
+> These changes affect only code written against a beta version such as v1.5.0-beta.1
+* Removed persistent token caching. It will return in v1.6.0-beta.1
+
+### Bugs Fixed
+* Credentials now preserve MSAL headers e.g. X-Client-Sku
+
+### Other Changes
+* Upgraded dependencies
+
+## 1.5.0-beta.2 (2023-11-07)
+
+### Features Added
+* `DefaultAzureCredential` and `ManagedIdentityCredential` support Azure ML managed identity
+* Added spans for distributed tracing.
+
+## 1.5.0-beta.1 (2023-10-10)
+
+### Features Added
+* Optional persistent token caching for most credentials. Set `TokenCachePersistenceOptions`
+ on a credential's options to enable and configure this. See the package documentation for
+ this version and [TOKEN_CACHING.md](https://aka.ms/azsdk/go/identity/caching) for more
+ details.
+* `AzureDeveloperCLICredential` authenticates with the Azure Developer CLI (`azd`). This
+ credential is also part of the `DefaultAzureCredential` authentication flow.
+
+## 1.4.0 (2023-10-10)
+
+### Bugs Fixed
+* `ManagedIdentityCredential` will now retry when IMDS responds 410 or 503
+
+## 1.4.0-beta.5 (2023-09-12)
+
+### Features Added
+* Service principal credentials can request CAE tokens
+
+### Breaking Changes
+> These changes affect only code written against a beta version such as v1.4.0-beta.4
+* Whether `GetToken` requests a CAE token is now determined by `TokenRequestOptions.EnableCAE`. Azure
+ SDK clients which support CAE will set this option automatically. Credentials no longer request CAE
+ tokens by default or observe the environment variable "AZURE_IDENTITY_DISABLE_CP1".
+
+### Bugs Fixed
+* Credential chains such as `DefaultAzureCredential` now try their next credential, if any, when
+ managed identity authentication fails in a Docker Desktop container
+ ([#21417](https://github.com/Azure/azure-sdk-for-go/issues/21417))
+
+## 1.4.0-beta.4 (2023-08-16)
+
+### Other Changes
+* Upgraded dependencies
+
+## 1.3.1 (2023-08-16)
+
+### Other Changes
+* Upgraded dependencies
+
+## 1.4.0-beta.3 (2023-08-08)
+
+### Bugs Fixed
+* One invocation of `AzureCLICredential.GetToken()` and `OnBehalfOfCredential.GetToken()`
+ can no longer make two authentication attempts
+
+## 1.4.0-beta.2 (2023-07-14)
+
+### Other Changes
+* `DefaultAzureCredentialOptions.TenantID` applies to workload identity authentication
+* Upgraded dependencies
+
+## 1.4.0-beta.1 (2023-06-06)
+
+### Other Changes
+* Re-enabled CAE support as in v1.3.0-beta.3
+
+## 1.3.0 (2023-05-09)
+
+### Breaking Changes
+> These changes affect only code written against a beta version such as v1.3.0-beta.5
+* Renamed `NewOnBehalfOfCredentialFromCertificate` to `NewOnBehalfOfCredentialWithCertificate`
+* Renamed `NewOnBehalfOfCredentialFromSecret` to `NewOnBehalfOfCredentialWithSecret`
+
+### Other Changes
+* Upgraded to MSAL v1.0.0
+
+## 1.3.0-beta.5 (2023-04-11)
+
+### Breaking Changes
+> These changes affect only code written against a beta version such as v1.3.0-beta.4
+* Moved `NewWorkloadIdentityCredential()` parameters into `WorkloadIdentityCredentialOptions`.
+ The constructor now reads default configuration from environment variables set by the Azure
+ workload identity webhook by default.
+ ([#20478](https://github.com/Azure/azure-sdk-for-go/pull/20478))
+* Removed CAE support. It will return in v1.4.0-beta.1
+ ([#20479](https://github.com/Azure/azure-sdk-for-go/pull/20479))
+
+### Bugs Fixed
+* Fixed an issue in `DefaultAzureCredential` that could cause the managed identity endpoint check to fail in rare circumstances.
+
+## 1.3.0-beta.4 (2023-03-08)
+
+### Features Added
+* Added `WorkloadIdentityCredentialOptions.AdditionallyAllowedTenants` and `.DisableInstanceDiscovery`
+
+### Bugs Fixed
+* Credentials now synchronize within `GetToken()` so a single instance can be shared among goroutines
+ ([#20044](https://github.com/Azure/azure-sdk-for-go/issues/20044))
+
+### Other Changes
+* Upgraded dependencies
+
+## 1.2.2 (2023-03-07)
+
+### Other Changes
+* Upgraded dependencies
+
+## 1.3.0-beta.3 (2023-02-07)
+
+### Features Added
+* By default, credentials set client capability "CP1" to enable support for
+ [Continuous Access Evaluation (CAE)](https://learn.microsoft.com/entra/identity-platform/app-resilience-continuous-access-evaluation).
+ This indicates to Microsoft Entra ID that your application can handle CAE claims challenges.
+ You can disable this behavior by setting the environment variable "AZURE_IDENTITY_DISABLE_CP1" to "true".
+* `InteractiveBrowserCredentialOptions.LoginHint` enables pre-populating the login
+ prompt with a username ([#15599](https://github.com/Azure/azure-sdk-for-go/pull/15599))
+* Service principal and user credentials support ADFS authentication on Azure Stack.
+ Specify "adfs" as the credential's tenant.
+* Applications running in private or disconnected clouds can prevent credentials from
+ requesting Microsoft Entra instance metadata by setting the `DisableInstanceDiscovery`
+ field on credential options.
+* Many credentials can now be configured to authenticate in multiple tenants. The
+ options types for these credentials have an `AdditionallyAllowedTenants` field
+ that specifies additional tenants in which the credential may authenticate.
+
+## 1.3.0-beta.2 (2023-01-10)
+
+### Features Added
+* Added `OnBehalfOfCredential` to support the on-behalf-of flow
+ ([#16642](https://github.com/Azure/azure-sdk-for-go/issues/16642))
+
+### Bugs Fixed
+* `AzureCLICredential` reports token expiration in local time (should be UTC)
+
+### Other Changes
+* `AzureCLICredential` imposes its default timeout only when the `Context`
+ passed to `GetToken()` has no deadline
+* Added `NewCredentialUnavailableError()`. This function constructs an error indicating
+ a credential can't authenticate and an encompassing `ChainedTokenCredential` should
+ try its next credential, if any.
+
+## 1.3.0-beta.1 (2022-12-13)
+
+### Features Added
+* `WorkloadIdentityCredential` and `DefaultAzureCredential` support
+ Workload Identity Federation on Kubernetes. `DefaultAzureCredential`
+ support requires environment variable configuration as set by the
+ Workload Identity webhook.
+ ([#15615](https://github.com/Azure/azure-sdk-for-go/issues/15615))
+
+## 1.2.0 (2022-11-08)
+
+### Other Changes
+* This version includes all fixes and features from 1.2.0-beta.*
+
+## 1.2.0-beta.3 (2022-10-11)
+
+### Features Added
+* `ManagedIdentityCredential` caches tokens in memory
+
+### Bugs Fixed
+* `ClientCertificateCredential` sends only the leaf cert for SNI authentication
+
+## 1.2.0-beta.2 (2022-08-10)
+
+### Features Added
+* Added `ClientAssertionCredential` to enable applications to authenticate
+ with custom client assertions
+
+### Other Changes
+* Updated AuthenticationFailedError with links to TROUBLESHOOTING.md for relevant errors
+* Upgraded `microsoft-authentication-library-for-go` requirement to v0.6.0
+
+## 1.2.0-beta.1 (2022-06-07)
+
+### Features Added
+* `EnvironmentCredential` reads certificate passwords from `AZURE_CLIENT_CERTIFICATE_PASSWORD`
+ ([#17099](https://github.com/Azure/azure-sdk-for-go/pull/17099))
+
+## 1.1.0 (2022-06-07)
+
+### Features Added
+* `ClientCertificateCredential` and `ClientSecretCredential` support ESTS-R. First-party
+ applications can set environment variable `AZURE_REGIONAL_AUTHORITY_NAME` with a
+ region name.
+ ([#15605](https://github.com/Azure/azure-sdk-for-go/issues/15605))
+
+## 1.0.1 (2022-06-07)
+
+### Other Changes
+* Upgrade `microsoft-authentication-library-for-go` requirement to v0.5.1
+ ([#18176](https://github.com/Azure/azure-sdk-for-go/issues/18176))
+
+## 1.0.0 (2022-05-12)
+
+### Features Added
+* `DefaultAzureCredential` reads environment variable `AZURE_CLIENT_ID` for the
+ client ID of a user-assigned managed identity
+ ([#17293](https://github.com/Azure/azure-sdk-for-go/pull/17293))
+
+### Breaking Changes
+* Removed `AuthorizationCodeCredential`. Use `InteractiveBrowserCredential` instead
+ to authenticate a user with the authorization code flow.
+* Instances of `AuthenticationFailedError` are now returned by pointer.
+* `GetToken()` returns `azcore.AccessToken` by value
+
+### Bugs Fixed
+* `AzureCLICredential` panics after receiving an unexpected error type
+ ([#17490](https://github.com/Azure/azure-sdk-for-go/issues/17490))
+
+### Other Changes
+* `GetToken()` returns an error when the caller specifies no scope
+* Updated to the latest versions of `golang.org/x/crypto`, `azcore` and `internal`
+
+## 0.14.0 (2022-04-05)
+
+### Breaking Changes
+* This module now requires Go 1.18
+* Removed `AuthorityHost`. Credentials are now configured for sovereign or private
+ clouds with the API in `azcore/cloud`, for example:
+ ```go
+ // before
+ opts := azidentity.ClientSecretCredentialOptions{AuthorityHost: azidentity.AzureGovernment}
+ cred, err := azidentity.NewClientSecretCredential(tenantID, clientID, secret, &opts)
+
+ // after
+ import "github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud"
+
+ opts := azidentity.ClientSecretCredentialOptions{}
+ opts.Cloud = cloud.AzureGovernment
+ cred, err := azidentity.NewClientSecretCredential(tenantID, clientID, secret, &opts)
+ ```
+
+## 0.13.2 (2022-03-08)
+
+### Bugs Fixed
+* Prevented a data race in `DefaultAzureCredential` and `ChainedTokenCredential`
+ ([#17144](https://github.com/Azure/azure-sdk-for-go/issues/17144))
+
+### Other Changes
+* Upgraded App Service managed identity version from 2017-09-01 to 2019-08-01
+ ([#17086](https://github.com/Azure/azure-sdk-for-go/pull/17086))
+
+## 0.13.1 (2022-02-08)
+
+### Features Added
+* `EnvironmentCredential` supports certificate SNI authentication when
+ `AZURE_CLIENT_SEND_CERTIFICATE_CHAIN` is "true".
+ ([#16851](https://github.com/Azure/azure-sdk-for-go/pull/16851))
+
+### Bugs Fixed
+* `ManagedIdentityCredential.GetToken()` now returns an error when configured for
+ a user assigned identity in Azure Cloud Shell (which doesn't support such identities)
+ ([#16946](https://github.com/Azure/azure-sdk-for-go/pull/16946))
+
+### Other Changes
+* `NewDefaultAzureCredential()` logs non-fatal errors. These errors are also included in the
+ error returned by `DefaultAzureCredential.GetToken()` when it's unable to acquire a token
+ from any source. ([#15923](https://github.com/Azure/azure-sdk-for-go/issues/15923))
+
+## 0.13.0 (2022-01-11)
+
+### Breaking Changes
+* Replaced `AuthenticationFailedError.RawResponse()` with a field having the same name
+* Unexported `CredentialUnavailableError`
+* Instances of `ChainedTokenCredential` will now skip looping through the list of source credentials and re-use the first successful credential on subsequent calls to `GetToken`.
+ * If `ChainedTokenCredentialOptions.RetrySources` is true, `ChainedTokenCredential` will continue to try all of the originally provided credentials each time the `GetToken` method is called.
+ * `ChainedTokenCredential.successfulCredential` will contain a reference to the last successful credential.
+  * `DefaultAzureCredential` will also re-use the first successful credential on subsequent calls to `GetToken`.
+ * `DefaultAzureCredential.chain.successfulCredential` will also contain a reference to the last successful credential.
+
+### Other Changes
+* `ManagedIdentityCredential` no longer probes IMDS before requesting a token
+ from it. Also, an error response from IMDS no longer disables a credential
+ instance. Following an error, a credential instance will continue to send
+ requests to IMDS as necessary.
+* Adopted MSAL for user and service principal authentication
+* Updated `azcore` requirement to 0.21.0
+
+## 0.12.0 (2021-11-02)
+### Breaking Changes
+* Raised minimum go version to 1.16
+* Removed `NewAuthenticationPolicy()` from credentials. Clients should instead use azcore's
+ `runtime.NewBearerTokenPolicy()` to construct a bearer token authorization policy.
+* The `AuthorityHost` field in credential options structs is now a custom type,
+ `AuthorityHost`, with underlying type `string`
+* `NewChainedTokenCredential` has a new signature to accommodate a placeholder
+ options struct:
+ ```go
+ // before
+ cred, err := NewChainedTokenCredential(credA, credB)
+
+ // after
+ cred, err := NewChainedTokenCredential([]azcore.TokenCredential{credA, credB}, nil)
+ ```
+* Removed `ExcludeAzureCLICredential`, `ExcludeEnvironmentCredential`, and `ExcludeMSICredential`
+ from `DefaultAzureCredentialOptions`
+* `NewClientCertificateCredential` requires a `[]*x509.Certificate` and `crypto.PrivateKey` instead of
+ a path to a certificate file. Added `ParseCertificates` to simplify getting these in common cases:
+ ```go
+ // before
+ cred, err := NewClientCertificateCredential("tenant", "client-id", "/cert.pem", nil)
+
+ // after
+ certData, err := os.ReadFile("/cert.pem")
+ certs, key, err := ParseCertificates(certData, password)
+ cred, err := NewClientCertificateCredential(tenantID, clientID, certs, key, nil)
+ ```
+* Removed `InteractiveBrowserCredentialOptions.ClientSecret` and `.Port`
+* Removed `AADAuthenticationFailedError`
+* Removed `id` parameter of `NewManagedIdentityCredential()`. User assigned identities are now
+ specified by `ManagedIdentityCredentialOptions.ID`:
+ ```go
+ // before
+ cred, err := NewManagedIdentityCredential("client-id", nil)
+ // or, for a resource ID
+ opts := &ManagedIdentityCredentialOptions{ID: ResourceID}
+ cred, err := NewManagedIdentityCredential("/subscriptions/...", opts)
+
+ // after
+ clientID := ClientID("7cf7db0d-...")
+ opts := &ManagedIdentityCredentialOptions{ID: clientID}
+ // or, for a resource ID
+  resID := ResourceID("/subscriptions/...")
+ opts := &ManagedIdentityCredentialOptions{ID: resID}
+ cred, err := NewManagedIdentityCredential(opts)
+ ```
+* `DeviceCodeCredentialOptions.UserPrompt` has a new type: `func(context.Context, DeviceCodeMessage) error`
+* Credential options structs now embed `azcore.ClientOptions`. In addition to changing literal initialization
+ syntax, this change renames `HTTPClient` fields to `Transport`.
+* Renamed `LogCredential` to `EventCredential`
+* `AzureCLICredential` no longer reads the environment variable `AZURE_CLI_PATH`
+* `NewManagedIdentityCredential` no longer reads environment variables `AZURE_CLIENT_ID` and
+ `AZURE_RESOURCE_ID`. Use `ManagedIdentityCredentialOptions.ID` instead.
+* Unexported `AuthenticationFailedError` and `CredentialUnavailableError` structs. In their place are two
+ interfaces having the same names.
+
+### Bugs Fixed
+* `AzureCLICredential.GetToken` no longer mutates its `opts.Scopes`
+
+### Features Added
+* Added connection configuration options to `DefaultAzureCredentialOptions`
+* `AuthenticationFailedError.RawResponse()` returns the HTTP response motivating the error,
+ if available
+
+### Other Changes
+* `NewDefaultAzureCredential()` returns `*DefaultAzureCredential` instead of `*ChainedTokenCredential`
+* Added `TenantID` field to `DefaultAzureCredentialOptions` and `AzureCLICredentialOptions`
+
+## 0.11.0 (2021-09-08)
+### Breaking Changes
+* Unexported `AzureCLICredentialOptions.TokenProvider` and its type,
+ `AzureCLITokenProvider`
+
+### Bugs Fixed
+* `ManagedIdentityCredential.GetToken` returns `CredentialUnavailableError`
+ when IMDS has no assigned identity, signaling `DefaultAzureCredential` to
+ try other credentials
+
+
+## 0.10.0 (2021-08-30)
+### Breaking Changes
+* Update based on `azcore` refactor [#15383](https://github.com/Azure/azure-sdk-for-go/pull/15383)
+
+## 0.9.3 (2021-08-20)
+
+### Bugs Fixed
+* `ManagedIdentityCredential.GetToken` no longer mutates its `opts.Scopes`
+
+### Other Changes
+* Bumps version of `azcore` to `v0.18.1`
+
+
+## 0.9.2 (2021-07-23)
+### Features Added
+* Adding support for Service Fabric environment in `ManagedIdentityCredential`
+* Adding an option for using a resource ID instead of client ID in `ManagedIdentityCredential`
+
+
+## 0.9.1 (2021-05-24)
+### Features Added
+* Add LICENSE.txt and bump version information
+
+
+## 0.9.0 (2021-05-21)
+### Features Added
+* Add support for authenticating in Azure Stack environments
+* Enable user assigned identities for the IMDS scenario in `ManagedIdentityCredential`
+* Add scope to resource conversion in `GetToken()` on `ManagedIdentityCredential`
+
+
+## 0.8.0 (2021-01-20)
+### Features Added
+* Updating documentation
+
+
+## 0.7.1 (2021-01-04)
+### Features Added
+* Adding port option to `InteractiveBrowserCredential`
+
+
+## 0.7.0 (2020-12-11)
+### Features Added
+* Add `redirectURI` parameter back to authentication code flow
+
+
+## 0.6.1 (2020-12-09)
+### Features Added
+* Updating query parameter in `ManagedIdentityCredential` and updating datetime string for parsing managed identity access tokens.
+
+
+## 0.6.0 (2020-11-16)
+### Features Added
+* Remove `RedirectURL` parameter from auth code flow to align with the MSAL implementation which relies on the native client redirect URL.
+
+
+## 0.5.0 (2020-10-30)
+### Features Added
+* Flattening credential options
+
+
+## 0.4.3 (2020-10-21)
+### Features Added
+* Adding Azure Arc support in `ManagedIdentityCredential`
+
+
+## 0.4.2 (2020-10-16)
+### Features Added
+* Typo fixes
+
+
+## 0.4.1 (2020-10-16)
+### Features Added
+* Ensure authority hosts are only HTTPs
+
+
+## 0.4.0 (2020-10-16)
+### Features Added
+* Adding options structs for credentials
+
+
+## 0.3.0 (2020-10-09)
+### Features Added
+* Update `DeviceCodeCredential` callback
+
+
+## 0.2.2 (2020-10-09)
+### Features Added
+* Add `AuthorizationCodeCredential`
+
+
+## 0.2.1 (2020-10-06)
+### Features Added
+* Add `InteractiveBrowserCredential`
+
+
+## 0.2.0 (2020-09-11)
+### Features Added
+* Refactor `azidentity` on top of `azcore` refactor
+* Updated policies to conform to `policy.Policy` interface changes.
+* Updated non-retriable errors to conform to `azcore.NonRetriableError`.
+* Fixed calls to `Request.SetBody()` to include content type.
+* Switched endpoints to string types and removed extra parsing code.
+
+
+## 0.1.1 (2020-09-02)
+### Features Added
+* Add `AzureCLICredential` to `DefaultAzureCredential` chain
+
+
+## 0.1.0 (2020-07-23)
+### Features Added
+* Initial Release. Azure Identity library that provides Microsoft Entra token authentication support for the SDK.
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Microsoft Corporation.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -0,0 +1,307 @@
+# Migrating from autorest/adal to azidentity
+
+`azidentity` provides Microsoft Entra ID ([formerly Azure Active Directory](https://learn.microsoft.com/entra/fundamentals/new-name)) authentication for the newest Azure SDK modules (`github.com/azure-sdk-for-go/sdk/...`). Older Azure SDK packages (`github.com/azure-sdk-for-go/services/...`) use types from `github.com/go-autorest/autorest/adal` instead.
+
+This guide shows common authentication code using `autorest/adal` and its equivalent using `azidentity`.
+
+## Table of contents
+
+- [Acquire a token](#acquire-a-token)
+- [Client certificate authentication](#client-certificate-authentication)
+- [Client secret authentication](#client-secret-authentication)
+- [Configuration](#configuration)
+- [Device code authentication](#device-code-authentication)
+- [Managed identity](#managed-identity)
+- [Use azidentity credentials with older packages](#use-azidentity-credentials-with-older-packages)
+
+## Configuration
+
+### `autorest/adal`
+
+Token providers require a token audience (resource identifier) and an instance of `adal.OAuthConfig`, which requires a Microsoft Entra endpoint and tenant:
+
+```go
+import "github.com/Azure/go-autorest/autorest/adal"
+
+oauthCfg, err := adal.NewOAuthConfig("https://login.chinacloudapi.cn", tenantID)
+handle(err)
+
+spt, err := adal.NewServicePrincipalTokenWithSecret(
+ *oauthCfg, clientID, "https://management.chinacloudapi.cn/", &adal.ServicePrincipalTokenSecret{ClientSecret: secret},
+)
+```
+
+### `azidentity`
+
+A credential instance can acquire tokens for any audience. The audience for each token is determined by the client requesting it. Credentials require endpoint configuration only for sovereign or private clouds. The `azcore/cloud` package has predefined configuration for sovereign clouds such as Azure China:
+
+```go
+import (
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud"
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
+)
+
+clientOpts := azcore.ClientOptions{Cloud: cloud.AzureChina}
+
+cred, err := azidentity.NewClientSecretCredential(
+ tenantID, clientID, secret, &azidentity.ClientSecretCredentialOptions{ClientOptions: clientOpts},
+)
+handle(err)
+```
+
+## Client secret authentication
+
+### `autorest/adal`
+
+```go
+import (
+ "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2018-06-01/subscriptions"
+ "github.com/Azure/go-autorest/autorest"
+ "github.com/Azure/go-autorest/autorest/adal"
+)
+
+oauthCfg, err := adal.NewOAuthConfig("https://login.microsoftonline.com", tenantID)
+handle(err)
+spt, err := adal.NewServicePrincipalTokenWithSecret(
+ *oauthCfg, clientID, "https://management.azure.com/", &adal.ServicePrincipalTokenSecret{ClientSecret: secret},
+)
+handle(err)
+
+client := subscriptions.NewClient()
+client.Authorizer = autorest.NewBearerAuthorizer(spt)
+```
+
+### `azidentity`
+
+```go
+import (
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
+ "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armsubscriptions"
+)
+
+cred, err := azidentity.NewClientSecretCredential(tenantID, clientID, secret, nil)
+handle(err)
+
+client, err := armsubscriptions.NewClient(cred, nil)
+handle(err)
+```
+
+## Client certificate authentication
+
+### `autorest/adal`
+
+```go
+import (
+ "os"
+
+ "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2018-06-01/subscriptions"
+ "github.com/Azure/go-autorest/autorest"
+ "github.com/Azure/go-autorest/autorest/adal"
+)
+certData, err := os.ReadFile("./example.pfx")
+handle(err)
+
+certificate, rsaPrivateKey, err := decodePkcs12(certData, "")
+handle(err)
+
+oauthCfg, err := adal.NewOAuthConfig("https://login.microsoftonline.com", tenantID)
+handle(err)
+
+spt, err := adal.NewServicePrincipalTokenFromCertificate(
+	*oauthCfg, clientID, certificate, rsaPrivateKey, "https://management.azure.com/",
+)
+
+client := subscriptions.NewClient()
+client.Authorizer = autorest.NewBearerAuthorizer(spt)
+```
+
+### `azidentity`
+
+```go
+import (
+ "os"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
+ "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armsubscriptions"
+)
+
+certData, err := os.ReadFile("./example.pfx")
+handle(err)
+
+certs, key, err := azidentity.ParseCertificates(certData, nil)
+handle(err)
+
+cred, err = azidentity.NewClientCertificateCredential(tenantID, clientID, certs, key, nil)
+handle(err)
+
+client, err := armsubscriptions.NewClient(cred, nil)
+handle(err)
+```
+
+## Managed identity
+
+### `autorest/adal`
+
+```go
+import (
+ "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2018-06-01/subscriptions"
+ "github.com/Azure/go-autorest/autorest"
+ "github.com/Azure/go-autorest/autorest/adal"
+)
+
+spt, err := adal.NewServicePrincipalTokenFromManagedIdentity("https://management.azure.com/", nil)
+handle(err)
+
+client := subscriptions.NewClient()
+client.Authorizer = autorest.NewBearerAuthorizer(spt)
+```
+
+### `azidentity`
+
+```go
+import (
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
+ "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armsubscriptions"
+)
+
+cred, err := azidentity.NewManagedIdentityCredential(nil)
+handle(err)
+
+client, err := armsubscriptions.NewClient(cred, nil)
+handle(err)
+```
+
+### User-assigned identities
+
+`autorest/adal`:
+
+```go
+import "github.com/Azure/go-autorest/autorest/adal"
+
+opts := &adal.ManagedIdentityOptions{ClientID: "..."}
+spt, err := adal.NewServicePrincipalTokenFromManagedIdentity("https://management.azure.com/", opts)
+handle(err)
+```
+
+`azidentity`:
+
+```go
+import "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
+
+opts := azidentity.ManagedIdentityCredentialOptions{ID: azidentity.ClientID("...")}
+cred, err := azidentity.NewManagedIdentityCredential(&opts)
+handle(err)
+```
+
+## Device code authentication
+
+### `autorest/adal`
+
+```go
+import (
+ "fmt"
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2018-06-01/subscriptions"
+ "github.com/Azure/go-autorest/autorest"
+ "github.com/Azure/go-autorest/autorest/adal"
+)
+
+oauthClient := &http.Client{}
+oauthCfg, err := adal.NewOAuthConfig("https://login.microsoftonline.com", tenantID)
+handle(err)
+resource := "https://management.azure.com/"
+deviceCode, err := adal.InitiateDeviceAuth(oauthClient, *oauthCfg, clientID, resource)
+handle(err)
+
+// display instructions, wait for the user to authenticate
+fmt.Println(*deviceCode.Message)
+token, err := adal.WaitForUserCompletion(oauthClient, deviceCode)
+handle(err)
+
+spt, err := adal.NewServicePrincipalTokenFromManualToken(*oauthCfg, clientID, resource, *token)
+handle(err)
+
+client := subscriptions.NewClient()
+client.Authorizer = autorest.NewBearerAuthorizer(spt)
+```
+
+### `azidentity`
+
+```go
+import (
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
+ "github.com/Azure/azure-sdk-for-go/sdk/resourcemanager/resources/armsubscriptions"
+)
+
+cred, err := azidentity.NewDeviceCodeCredential(nil)
+handle(err)
+
+client, err := armsubscriptions.NewClient(cred, nil)
+handle(err)
+```
+
+`azidentity.DeviceCodeCredential` will guide a user through authentication, printing instructions to the console by default. The user prompt is customizable. For more information, see the [package documentation](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#DeviceCodeCredential).
+
+## Acquire a token
+
+### `autorest/adal`
+
+```go
+import "github.com/Azure/go-autorest/autorest/adal"
+
+oauthCfg, err := adal.NewOAuthConfig("https://login.microsoftonline.com", tenantID)
+handle(err)
+
+spt, err := adal.NewServicePrincipalTokenWithSecret(
+ *oauthCfg, clientID, "https://vault.azure.net", &adal.ServicePrincipalTokenSecret{ClientSecret: secret},
+)
+
+err = spt.Refresh()
+if err == nil {
+ token := spt.Token
+}
+```
+
+### `azidentity`
+
+In ordinary usage, application code doesn't need to request tokens from credentials directly. Azure SDK clients handle token acquisition and refreshing internally. However, applications may call `GetToken()` to do so. All credential types have this method.
+
+```go
+import (
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
+)
+
+cred, err := azidentity.NewClientSecretCredential(tenantID, clientID, secret, nil)
+handle(err)
+
+tk, err := cred.GetToken(
+ context.TODO(), policy.TokenRequestOptions{Scopes: []string{"https://vault.azure.net/.default"}},
+)
+if err == nil {
+ token := tk.Token
+}
+```
+
+Note that `azidentity` credentials use the Microsoft Entra endpoint, which requires OAuth 2 scopes instead of the resource identifiers `autorest/adal` expects. For more information, see [Microsoft Entra ID documentation](https://learn.microsoft.com/entra/identity-platform/permissions-consent-overview).
+
+## Use azidentity credentials with older packages
+
+The [azidext module](https://pkg.go.dev/github.com/jongio/azidext/go/azidext) provides an adapter for `azidentity` credential types. The adapter enables using the credential types with older Azure SDK clients. For example:
+
+```go
+import (
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity"
+ "github.com/Azure/azure-sdk-for-go/services/resources/mgmt/2018-06-01/subscriptions"
+ "github.com/jongio/azidext/go/azidext"
+)
+
+cred, err := azidentity.NewClientSecretCredential(tenantID, clientID, secret, nil)
+handle(err)
+
+client := subscriptions.NewClient()
+client.Authorizer = azidext.NewTokenCredentialAdapter(cred, []string{"https://management.azure.com//.default"})
+```
+
+
@@ -0,0 +1,258 @@
+# Azure Identity Client Module for Go
+
+The Azure Identity module provides Microsoft Entra ID ([formerly Azure Active Directory](https://learn.microsoft.com/entra/fundamentals/new-name)) token authentication support across the Azure SDK. It includes a set of `TokenCredential` implementations, which can be used with Azure SDK clients supporting token authentication.
+
+[Package (API) reference documentation](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity)
+| [Microsoft Entra ID documentation](https://learn.microsoft.com/entra/identity/)
+| [Source code](https://github.com/Azure/azure-sdk-for-go/tree/main/sdk/azidentity)
+
+# Getting started
+
+## Install the module
+
+This project uses [Go modules](https://github.com/golang/go/wiki/Modules) for versioning and dependency management.
+
+Install the Azure Identity module:
+
+```sh
+go get -u github.com/Azure/azure-sdk-for-go/sdk/azidentity
+```
+
+## Prerequisites
+
+- an [Azure subscription](https://azure.microsoft.com/free/)
+- Go 1.18
+
+### Authenticating during local development
+
+When debugging and executing code locally, developers typically use their own accounts to authenticate calls to Azure services. The `azidentity` module supports authenticating through developer tools to simplify local development.
+
+#### Authenticating via the Azure CLI
+
+`DefaultAzureCredential` and `AzureCLICredential` can authenticate as the user
+signed in to the [Azure CLI](https://learn.microsoft.com/cli/azure). To sign in to the Azure CLI, run `az login`. On a system with a default web browser, the Azure CLI will launch the browser to authenticate a user.
+
+When no default browser is available, `az login` will use the device code
+authentication flow. This can also be selected manually by running `az login --use-device-code`.
+
+#### Authenticate via the Azure Developer CLI
+
+Developers coding outside of an IDE can also use the [Azure Developer CLI](https://aka.ms/azure-dev) to authenticate. Applications using the `DefaultAzureCredential` or the `AzureDeveloperCLICredential` can use the account logged in to the Azure Developer CLI to authenticate calls in their application when running locally.
+
+To authenticate with the Azure Developer CLI, run `azd auth login`. On a system with a default web browser, `azd` will launch the browser to authenticate. On systems without a default web browser, run `azd auth login --use-device-code` to use the device code authentication flow.
+
+## Key concepts
+
+### Credentials
+
+A credential is a type which contains or can obtain the data needed for a
+service client to authenticate requests. Service clients across the Azure SDK
+accept a credential instance when they are constructed, and use that credential
+to authenticate requests.
+
+The `azidentity` module focuses on OAuth authentication with Microsoft Entra ID. It offers a variety of credential types capable of acquiring a Microsoft Entra access token. See [Credential Types](#credential-types "Credential Types") for a list of this module's credential types.
+
+### DefaultAzureCredential
+
+`DefaultAzureCredential` is appropriate for most apps that will be deployed to Azure. It combines common production credentials with development credentials. It attempts to authenticate via the following mechanisms in this order, stopping when one succeeds:
+
+
+
+1. **Environment** - `DefaultAzureCredential` will read account information specified via [environment variables](#environment-variables) and use it to authenticate.
+1. **Workload Identity** - If the app is deployed on Kubernetes with environment variables set by the workload identity webhook, `DefaultAzureCredential` will authenticate the configured identity.
+1. **Managed Identity** - If the app is deployed to an Azure host with managed identity enabled, `DefaultAzureCredential` will authenticate with it.
+1. **Azure CLI** - If a user or service principal has authenticated via the Azure CLI `az login` command, `DefaultAzureCredential` will authenticate that identity.
+1. **Azure Developer CLI** - If the developer has authenticated via the Azure Developer CLI `azd auth login` command, the `DefaultAzureCredential` will authenticate with that account.
+
+> Note: `DefaultAzureCredential` is intended to simplify getting started with the SDK by handling common scenarios with reasonable default behaviors. Developers who want more control or whose scenario isn't served by the default settings should use other credential types.
+
+## Managed Identity
+
+`DefaultAzureCredential` and `ManagedIdentityCredential` support
+[managed identity authentication](https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/overview)
+in any hosting environment which supports managed identities, such as (this list is not exhaustive):
+* [Azure App Service](https://learn.microsoft.com/azure/app-service/overview-managed-identity)
+* [Azure Arc](https://learn.microsoft.com/azure/azure-arc/servers/managed-identity-authentication)
+* [Azure Cloud Shell](https://learn.microsoft.com/azure/cloud-shell/msi-authorization)
+* [Azure Kubernetes Service](https://learn.microsoft.com/azure/aks/use-managed-identity)
+* [Azure Service Fabric](https://learn.microsoft.com/azure/service-fabric/concepts-managed-identity)
+* [Azure Virtual Machines](https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/how-to-use-vm-token)
+
+## Examples
+
+- [Authenticate with DefaultAzureCredential](#authenticate-with-defaultazurecredential "Authenticate with DefaultAzureCredential")
+- [Define a custom authentication flow with ChainedTokenCredential](#define-a-custom-authentication-flow-with-chainedtokencredential "Define a custom authentication flow with ChainedTokenCredential")
+- [Specify a user-assigned managed identity for DefaultAzureCredential](#specify-a-user-assigned-managed-identity-for-defaultazurecredential)
+
+### Authenticate with DefaultAzureCredential
+
+This example demonstrates authenticating a client from the `armresources` module with `DefaultAzureCredential`.
+
+```go
+cred, err := azidentity.NewDefaultAzureCredential(nil)
+if err != nil {
+ // handle error
+}
+
+client := armresources.NewResourceGroupsClient("subscription ID", cred, nil)
+```
+
+### Specify a user-assigned managed identity for DefaultAzureCredential
+
+To configure `DefaultAzureCredential` to authenticate a user-assigned managed identity, set the environment variable `AZURE_CLIENT_ID` to the identity's client ID.
+
+### Define a custom authentication flow with `ChainedTokenCredential`
+
+`DefaultAzureCredential` is generally the quickest way to get started developing apps for Azure. For more advanced scenarios, `ChainedTokenCredential` links multiple credential instances to be tried sequentially when authenticating. It will try each chained credential in turn until one provides a token or fails to authenticate due to an error.
+
+The following example demonstrates creating a credential, which will attempt to authenticate using managed identity. It will fall back to authenticating via the Azure CLI when a managed identity is unavailable.
+
+```go
+managed, err := azidentity.NewManagedIdentityCredential(nil)
+if err != nil {
+ // handle error
+}
+azCLI, err := azidentity.NewAzureCLICredential(nil)
+if err != nil {
+ // handle error
+}
+chain, err := azidentity.NewChainedTokenCredential([]azcore.TokenCredential{managed, azCLI}, nil)
+if err != nil {
+ // handle error
+}
+
+client := armresources.NewResourceGroupsClient("subscription ID", chain, nil)
+```
+
+## Credential Types
+
+### Authenticating Azure Hosted Applications
+
+|Credential|Usage
+|-|-
+|[DefaultAzureCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#DefaultAzureCredential)|Simplified authentication experience for getting started developing Azure apps
+|[ChainedTokenCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#ChainedTokenCredential)|Define custom authentication flows, composing multiple credentials
+|[EnvironmentCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#EnvironmentCredential)|Authenticate a service principal or user configured by environment variables
+|[ManagedIdentityCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#ManagedIdentityCredential)|Authenticate the managed identity of an Azure resource
+|[WorkloadIdentityCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#WorkloadIdentityCredential)|Authenticate a workload identity on Kubernetes
+
+### Authenticating Service Principals
+
+|Credential|Usage
+|-|-
+|[AzurePipelinesCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#AzurePipelinesCredential)|Authenticate an Azure Pipelines [service connection](https://learn.microsoft.com/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml)
+|[ClientAssertionCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#ClientAssertionCredential)|Authenticate a service principal with a signed client assertion
+|[ClientCertificateCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#ClientCertificateCredential)|Authenticate a service principal with a certificate
+|[ClientSecretCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#ClientSecretCredential)|Authenticate a service principal with a secret
+
+### Authenticating Users
+
+|Credential|Usage
+|-|-
+|[InteractiveBrowserCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#InteractiveBrowserCredential)|Interactively authenticate a user with the default web browser
+|[DeviceCodeCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#DeviceCodeCredential)|Interactively authenticate a user on a device with limited UI
+|[UsernamePasswordCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#UsernamePasswordCredential)|Authenticate a user with a username and password
+
+### Authenticating via Development Tools
+
+|Credential|Usage
+|-|-
+|[AzureCLICredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#AzureCLICredential)|Authenticate as the user signed in to the Azure CLI
+|[AzureDeveloperCLICredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#AzureDeveloperCLICredential)|Authenticate as the user signed in to the Azure Developer CLI
+
+## Environment Variables
+
+`DefaultAzureCredential` and `EnvironmentCredential` can be configured with environment variables. Each type of authentication requires values for specific variables:
+
+#### Service principal with secret
+
+|variable name|value
+|-|-
+|`AZURE_CLIENT_ID`|ID of a Microsoft Entra application
+|`AZURE_TENANT_ID`|ID of the application's Microsoft Entra tenant
+|`AZURE_CLIENT_SECRET`|one of the application's client secrets
+
+#### Service principal with certificate
+
+|variable name|value
+|-|-
+|`AZURE_CLIENT_ID`|ID of a Microsoft Entra application
+|`AZURE_TENANT_ID`|ID of the application's Microsoft Entra tenant
+|`AZURE_CLIENT_CERTIFICATE_PATH`|path to a certificate file including private key
+|`AZURE_CLIENT_CERTIFICATE_PASSWORD`|password of the certificate file, if any
+
+#### Username and password
+
+|variable name|value
+|-|-
+|`AZURE_CLIENT_ID`|ID of a Microsoft Entra application
+|`AZURE_USERNAME`|a username (usually an email address)
+|`AZURE_PASSWORD`|that user's password
+
+Configuration is attempted in the above order. For example, if values for a
+client secret and certificate are both present, the client secret will be used.
+
+## Token caching
+
+Token caching is an `azidentity` feature that allows apps to:
+
+* Cache tokens in memory (default) or on disk (opt-in).
+* Improve resilience and performance.
+* Reduce the number of requests made to Microsoft Entra ID to obtain access tokens.
+
+For more details, see the [token caching documentation](https://aka.ms/azsdk/go/identity/caching).
+
+## Troubleshooting
+
+### Error Handling
+
+Credentials return an `error` when they fail to authenticate or lack data they require to authenticate. For guidance on resolving errors from specific credential types, see the [troubleshooting guide](https://aka.ms/azsdk/go/identity/troubleshoot).
+
+For more details on handling specific Microsoft Entra errors, see the Microsoft Entra [error code documentation](https://learn.microsoft.com/entra/identity-platform/reference-error-codes).
+
+### Logging
+
+This module uses the classification-based logging implementation in `azcore`. To enable console logging for all SDK modules, set `AZURE_SDK_GO_LOGGING` to `all`. Use the `azcore/log` package to control log event output or to enable logs for `azidentity` only. For example:
+```go
+import azlog "github.com/Azure/azure-sdk-for-go/sdk/azcore/log"
+
+// print log output to stdout
+azlog.SetListener(func(event azlog.Event, s string) {
+ fmt.Println(s)
+})
+
+// include only azidentity credential logs
+azlog.SetEvents(azidentity.EventAuthentication)
+```
+
+Credentials log basic information only, such as `GetToken` success or failure and errors. These log entries don't contain authentication secrets but may contain sensitive information.
+
+## Next steps
+
+Client and management modules listed on the [Azure SDK releases page](https://azure.github.io/azure-sdk/releases/latest/go.html) support authenticating with `azidentity` credential types. You can learn more about using these libraries in their documentation, which is linked from the release page.
+
+## Provide Feedback
+
+If you encounter bugs or have suggestions, please
+[open an issue](https://github.com/Azure/azure-sdk-for-go/issues).
+
+## Contributing
+
+This project welcomes contributions and suggestions. Most contributions require
+you to agree to a Contributor License Agreement (CLA) declaring that you have
+the right to, and actually do, grant us the rights to use your contribution.
+For details, visit [https://cla.microsoft.com](https://cla.microsoft.com).
+
+When you submit a pull request, a CLA-bot will automatically determine whether
+you need to provide a CLA and decorate the PR appropriately (e.g., label,
+comment). Simply follow the instructions provided by the bot. You will only
+need to do this once across all repos using our CLA.
+
+This project has adopted the
+[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
+For more information, see the
+[Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
+or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any
+additional questions or comments.
+
+
@@ -0,0 +1,71 @@
+## Token caching in the Azure Identity client module
+
+*Token caching* is a feature provided by the Azure Identity library that allows apps to:
+
+- Improve their resilience and performance.
+- Reduce the number of requests made to Microsoft Entra ID to obtain access tokens.
+- Reduce the number of times the user is prompted to authenticate.
+
+When an app needs to access a protected Azure resource, it typically needs to obtain an access token from Entra ID. Obtaining that token involves sending a request to Entra ID and may also involve prompting the user. Entra ID then validates the credentials provided in the request and issues an access token.
+
+Token caching, via the Azure Identity library, allows the app to store this access token [in memory](#in-memory-token-caching), where it's accessible to the current process, or [on disk](#persistent-token-caching) where it can be accessed across application or process invocations. The token can then be retrieved quickly and easily the next time the app needs to access the same resource. The app can avoid making another request to Entra ID, which reduces network traffic and improves resilience. Additionally, in scenarios where the app is authenticating users, token caching also avoids prompting the user each time new tokens are requested.
+
+### In-memory token caching
+
+*In-memory token caching* is the default option provided by the Azure Identity library. This caching approach allows apps to store access tokens in memory. With in-memory token caching, the library first determines if a valid access token for the requested resource is already stored in memory. If a valid token is found, it's returned to the app without the need to make another request to Entra ID. If a valid token isn't found, the library will automatically acquire a token by sending a request to Entra ID. The in-memory token cache provided by the Azure Identity library is thread-safe.
+
+**Note:** When Azure Identity library credentials are used with Azure service libraries (for example, Azure Blob Storage), the in-memory token caching is active in the `Pipeline` layer as well. All `TokenCredential` implementations are supported there, including custom implementations external to the Azure Identity library.
+
+#### Caching cannot be disabled
+
+As there are many levels of caching, it's not possible to disable in-memory caching. However, the in-memory cache may be cleared by creating a new credential instance.
+
+### Persistent token caching
+
+> Only azidentity v1.5.0-beta versions support persistent token caching
+
+*Persistent disk token caching* is an opt-in feature in the Azure Identity library. The feature allows apps to cache access tokens in an encrypted, persistent storage mechanism. As indicated in the following table, the storage mechanism differs across operating systems.
+
+| Operating system | Storage mechanism |
+|------------------|---------------------------------------|
+| Linux | kernel key retention service (keyctl) |
+| macOS | Keychain |
+| Windows | DPAPI |
+
+By default the token cache will protect any data which is persisted using the user data protection APIs available on the current platform.
+However, there are cases where no data protection is available, and applications may choose to allow storing the token cache in an unencrypted state by setting `TokenCachePersistenceOptions.AllowUnencryptedStorage` to `true`. This allows a credential to fall back to unencrypted storage if it can't encrypt the cache. However, we do not recommend using this storage method due to its significantly lower security measures. In addition, tokens are not encrypted solely to the current user, which could potentially allow unauthorized access to the cache by individuals with machine access.
+
+With persistent disk token caching enabled, the library first determines if a valid access token for the requested resource is already stored in the persistent cache. If a valid token is found, it's returned to the app without the need to make another request to Entra ID. Additionally, the tokens are preserved across app runs, which:
+
+- Makes the app more resilient to failures.
+- Ensures the app can continue to function during an Entra ID outage or disruption.
+- Avoids having to prompt users to authenticate each time the process is restarted.
+
+>IMPORTANT! The token cache contains sensitive data and **MUST** be protected to prevent compromising accounts. All application decisions regarding the persistence of the token cache must consider that a breach of its content will fully compromise all the accounts it contains.
+
+#### Example code
+
+See the [package documentation](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity@v1.6.0-beta.2#pkg-overview) for example code demonstrating how to configure persistent caching and access cached data.
+
+### Credentials supporting token caching
+
+The following table indicates the state of in-memory and persistent caching in each credential type.
+
+**Note:** In-memory caching is activated by default. Persistent token caching needs to be enabled as shown in [this example](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity@v1.5.0-beta.1#example-package-PersistentCache).
+
+| Credential | In-memory token caching | Persistent token caching |
+|--------------------------------|---------------------------------------------------------------------|--------------------------|
+| `AzureCLICredential` | Not Supported | Not Supported |
+| `AzureDeveloperCLICredential` | Not Supported | Not Supported |
+| `AzurePipelinesCredential` | Supported | Supported |
+| `ClientAssertionCredential` | Supported | Supported |
+| `ClientCertificateCredential` | Supported | Supported |
+| `ClientSecretCredential` | Supported | Supported |
+| `DefaultAzureCredential` | Supported if the target credential in the default chain supports it | Not Supported |
+| `DeviceCodeCredential` | Supported | Supported |
+| `EnvironmentCredential` | Supported | Not Supported |
+| `InteractiveBrowserCredential` | Supported | Supported |
+| `ManagedIdentityCredential` | Supported | Not Supported |
+| `OnBehalfOfCredential` | Supported | Supported |
+| `UsernamePasswordCredential` | Supported | Supported |
+| `WorkloadIdentityCredential` | Supported | Supported |
@@ -0,0 +1,241 @@
+# Troubleshoot Azure Identity authentication issues
+
+This troubleshooting guide covers failure investigation techniques, common errors for the credential types in the `azidentity` module, and mitigation steps to resolve these errors.
+
+## Table of contents
+
+- [Handle azidentity errors](#handle-azidentity-errors)
+ - [Permission issues](#permission-issues)
+- [Find relevant information in errors](#find-relevant-information-in-errors)
+- [Enable and configure logging](#enable-and-configure-logging)
+- [Troubleshoot AzureCLICredential authentication issues](#troubleshoot-azureclicredential-authentication-issues)
+- [Troubleshoot AzureDeveloperCLICredential authentication issues](#troubleshoot-azuredeveloperclicredential-authentication-issues)
+- [Troubleshoot AzurePipelinesCredential authentication issues](#troubleshoot-azurepipelinescredential-authentication-issues)
+- [Troubleshoot ClientCertificateCredential authentication issues](#troubleshoot-clientcertificatecredential-authentication-issues)
+- [Troubleshoot ClientSecretCredential authentication issues](#troubleshoot-clientsecretcredential-authentication-issues)
+- [Troubleshoot DefaultAzureCredential authentication issues](#troubleshoot-defaultazurecredential-authentication-issues)
+- [Troubleshoot EnvironmentCredential authentication issues](#troubleshoot-environmentcredential-authentication-issues)
+- [Troubleshoot ManagedIdentityCredential authentication issues](#troubleshoot-managedidentitycredential-authentication-issues)
+ - [Azure App Service and Azure Functions managed identity](#azure-app-service-and-azure-functions-managed-identity)
+ - [Azure Kubernetes Service managed identity](#azure-kubernetes-service-managed-identity)
+ - [Azure Virtual Machine managed identity](#azure-virtual-machine-managed-identity)
+- [Troubleshoot UsernamePasswordCredential authentication issues](#troubleshoot-usernamepasswordcredential-authentication-issues)
+- [Troubleshoot WorkloadIdentityCredential authentication issues](#troubleshoot-workloadidentitycredential-authentication-issues)
+- [Get additional help](#get-additional-help)
+
+## Handle azidentity errors
+
+Any service client method that makes a request to the service may return an error due to authentication failure. This is because the credential authenticates on the first call to the service and on any subsequent call that needs to refresh an access token. Authentication errors include a description of the failure and possibly an error message from Microsoft Entra ID. Depending on the application, these errors may or may not be recoverable.
+
+### Permission issues
+
+Service client errors with a status code of 401 or 403 often indicate that authentication succeeded but the caller doesn't have permission to access the specified API. Check the service documentation to determine which RBAC roles are needed for the request, and ensure the authenticated user or service principal has the appropriate role assignments.
+
+## Find relevant information in errors
+
+Authentication errors can include responses from Microsoft Entra ID and often contain information helpful in diagnosis. Consider the following error message:
+
+```
+ClientSecretCredential authentication failed
+POST https://login.microsoftonline.com/3c631bb7-a9f7-4343-a5ba-a615913/oauth2/v2.0/token
+--------------------------------------------------------------------------------
+RESPONSE 401 Unauthorized
+--------------------------------------------------------------------------------
+{
+ "error": "invalid_client",
+ "error_description": "AADSTS7000215: Invalid client secret provided. Ensure the secret being sent in the request is the client secret value, not the client secret ID, for a secret added to app '86be4c01-505b-45e9-bfc0-9b825fd84'.\r\nTrace ID: 03da4b8e-5ffe-48ca-9754-aff4276f0100\r\nCorrelation ID: 7b12f9bb-2eef-42e3-ad75-eee69ec9088d\r\nTimestamp: 2022-03-02 18:25:26Z",
+ "error_codes": [
+ 7000215
+ ],
+ "timestamp": "2022-03-02 18:25:26Z",
+ "trace_id": "03da4b8e-5ffe-48ca-9754-aff4276f0100",
+ "correlation_id": "7b12f9bb-2eef-42e3-ad75-eee69ec9088d",
+ "error_uri": "https://login.microsoftonline.com/error?code=7000215"
+}
+--------------------------------------------------------------------------------
+```
+
+This error contains several pieces of information:
+
+- __Failing Credential Type__: The type of credential that failed to authenticate. This can be helpful when diagnosing issues with chained credential types such as `DefaultAzureCredential` or `ChainedTokenCredential`.
+
+- __Microsoft Entra ID Error Code and Message__: The error code and message returned by Microsoft Entra ID. This can give insight into the specific reason the request failed. For instance, in this case authentication failed because the provided client secret is incorrect. [Microsoft Entra ID documentation](https://learn.microsoft.com/entra/identity-platform/reference-error-codes#aadsts-error-codes) has more information on AADSTS error codes.
+
+- __Correlation ID and Timestamp__: The correlation ID and timestamp identify the request in server-side logs. This information can be useful to support engineers diagnosing unexpected Microsoft Entra failures.
+
+### Enable and configure logging
+
+`azidentity` provides the same logging capabilities as the rest of the Azure SDK. The simplest way to see the logs to help debug authentication issues is to print credential logs to the console.
+```go
+import azlog "github.com/Azure/azure-sdk-for-go/sdk/azcore/log"
+
+// print log output to stdout
+azlog.SetListener(func(event azlog.Event, s string) {
+ fmt.Println(s)
+})
+
+// include only azidentity credential logs
+azlog.SetEvents(azidentity.EventAuthentication)
+```
+
+<a id="dac"></a>
+## Troubleshoot DefaultAzureCredential authentication issues
+
+| Error |Description| Mitigation |
+|---|---|---|
+|"DefaultAzureCredential failed to acquire a token"|No credential in the `DefaultAzureCredential` chain provided a token|<ul><li>[Enable logging](#enable-and-configure-logging) to get further diagnostic information.</li><li>Consult the troubleshooting guide for underlying credential types for more information.</li><ul><li>[EnvironmentCredential](#troubleshoot-environmentcredential-authentication-issues)</li><li>[ManagedIdentityCredential](#troubleshoot-managedidentitycredential-authentication-issues)</li><li>[AzureCLICredential](#troubleshoot-azureclicredential-authentication-issues)</li></ul>|
+|Error from the client with a status code of 401 or 403|Authentication succeeded but the authorizing Azure service responded with a 401 (Unauthorized), or 403 (Forbidden) status code|<ul><li>[Enable logging](#enable-and-configure-logging) to determine which credential in the chain returned the authenticating token.</li><li>If an unexpected credential is returning a token, check application configuration such as environment variables.</li><li>Ensure the correct role is assigned to the authenticated identity. For example, a service specific role rather than the subscription Owner role.</li></ul>|
+|"managed identity timed out"|`DefaultAzureCredential` sets a short timeout on its first managed identity authentication attempt to prevent very long timeouts during local development when no managed identity is available. That timeout causes this error in production when an application requests a token before the hosting environment is ready to provide one.|Use [ManagedIdentityCredential](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#ManagedIdentityCredential) directly, at least in production. It doesn't set a timeout on its authentication attempts.|
+
+## Troubleshoot EnvironmentCredential authentication issues
+
+| Error Message |Description| Mitigation |
+|---|---|---|
+|Missing or incomplete environment variable configuration|A valid combination of environment variables wasn't set|Ensure the appropriate environment variables are set for the intended authentication method as described in the [module documentation](https://pkg.go.dev/github.com/Azure/azure-sdk-for-go/sdk/azidentity#EnvironmentCredential)|
+
+<a id="client-secret"></a>
+## Troubleshoot ClientSecretCredential authentication issues
+
+| Error Code | Issue | Mitigation |
+|---|---|---|
+|AADSTS7000215|An invalid client secret was provided.|Ensure the secret provided to the credential constructor is valid. If unsure, create a new client secret using the Azure portal. Details on creating a new client secret are in [Microsoft Entra ID documentation](https://learn.microsoft.com/entra/identity-platform/howto-create-service-principal-portal#option-2-create-a-new-application-secret).|
+|AADSTS7000222|An expired client secret was provided.|Create a new client secret using the Azure portal. Details on creating a new client secret are in [Microsoft Entra ID documentation](https://learn.microsoft.com/entra/identity-platform/howto-create-service-principal-portal#option-2-create-a-new-application-secret).|
+|AADSTS700016|The specified application wasn't found in the specified tenant.|Ensure the client and tenant IDs provided to the credential constructor are correct for your application registration. For multi-tenant apps, ensure the application has been added to the desired tenant by a tenant admin. To add a new application in the desired tenant, follow the [Microsoft Entra ID instructions](https://learn.microsoft.com/entra/identity-platform/howto-create-service-principal-portal).|
+
+<a id="client-cert"></a>
+## Troubleshoot ClientCertificateCredential authentication issues
+
+| Error Code | Description | Mitigation |
+|---|---|---|
+|AADSTS700027|Client assertion contains an invalid signature.|Ensure the specified certificate has been uploaded to the application registration as described in [Microsoft Entra ID documentation](https://learn.microsoft.com/entra/identity-platform/howto-create-service-principal-portal#option-1-upload-a-certificate).|
+|AADSTS700016|The specified application wasn't found in the specified tenant.|Ensure the client and tenant IDs provided to the credential constructor are correct for your application registration. For multi-tenant apps, ensure the application has been added to the desired tenant by a tenant admin. To add a new application in the desired tenant, follow the [Microsoft Entra ID instructions](https://learn.microsoft.com/entra/identity-platform/howto-create-service-principal-portal).|
+
+<a id="username-password"></a>
+## Troubleshoot UsernamePasswordCredential authentication issues
+
+| Error Code | Issue | Mitigation |
+|---|---|---|
+|AADSTS50126|The provided username or password is invalid.|Ensure the username and password provided to the credential constructor are valid.|
+
+<a id="managed-id"></a>
+## Troubleshoot ManagedIdentityCredential authentication issues
+
+`ManagedIdentityCredential` is designed to work on a variety of Azure hosts that support managed identity. Configuration and troubleshooting vary from host to host. The following table lists the Azure hosts that can be assigned a managed identity and are supported by `ManagedIdentityCredential`.
+
+|Host Environment| | |
+|---|---|---|
+|Azure Virtual Machines and Scale Sets|[Configuration](https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/qs-configure-portal-windows-vm)|[Troubleshooting](#azure-virtual-machine-managed-identity)|
+|Azure App Service and Azure Functions|[Configuration](https://learn.microsoft.com/azure/app-service/overview-managed-identity)|[Troubleshooting](#azure-app-service-and-azure-functions-managed-identity)|
+|Azure Kubernetes Service|[Configuration](https://azure.github.io/aad-pod-identity/docs/)|[Troubleshooting](#azure-kubernetes-service-managed-identity)|
+|Azure Arc|[Configuration](https://learn.microsoft.com/azure/azure-arc/servers/managed-identity-authentication)||
+|Azure Service Fabric|[Configuration](https://learn.microsoft.com/azure/service-fabric/concepts-managed-identity)||
+
+### Azure Virtual Machine managed identity
+
+| Error Message |Description| Mitigation |
+|---|---|---|
+|The requested identity hasn’t been assigned to this resource.|The IMDS endpoint responded with a status code of 400, indicating the requested identity isn’t assigned to the VM.|If using a user assigned identity, ensure the specified ID is correct.<p/><p/>If using a system assigned identity, make sure it has been enabled as described in [managed identity documentation](https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/qs-configure-portal-windows-vm#enable-system-assigned-managed-identity-on-an-existing-vm).|
+|The request failed due to a gateway error.|The request to the IMDS endpoint failed due to a gateway error, 502 or 504 status code.|IMDS doesn't support requests via proxy or gateway. Disable proxies or gateways running on the VM for requests to the IMDS endpoint `http://169.254.169.254`|
+|No response received from the managed identity endpoint.|No response was received for the request to IMDS or the request timed out.|<ul><li>Ensure the VM is configured for managed identity as described in [managed identity documentation](https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/qs-configure-portal-windows-vm).</li><li>Verify the IMDS endpoint is reachable on the VM. See [below](#verify-imds-is-available-on-the-vm) for instructions.</li></ul>|
+|Multiple attempts failed to obtain a token from the managed identity endpoint.|The credential has exhausted its retries for a token request.|<ul><li>Refer to the error message for more details on specific failures.<li>Ensure the VM is configured for managed identity as described in [managed identity documentation](https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/qs-configure-portal-windows-vm).</li><li>Verify the IMDS endpoint is reachable on the VM. See [below](#verify-imds-is-available-on-the-vm) for instructions.</li></ul>|
+
+#### Verify IMDS is available on the VM
+
+If you have access to the VM, you can use `curl` to verify the managed identity endpoint is available.
+
+```sh
+curl 'http://169.254.169.254/metadata/identity/oauth2/token?resource=https://management.core.windows.net&api-version=2018-02-01' -H "Metadata: true"
+```
+
+> This command's output will contain an access token and SHOULD NOT BE SHARED, to avoid compromising account security.
+
+### Azure App Service and Azure Functions managed identity
+
+| Error Message |Description| Mitigation |
+|---|---|---|
+|Get "`http://169.254.169.254/...`" i/o timeout|The App Service host hasn't set environment variables for managed identity configuration.|<ul><li>Ensure the App Service is configured for managed identity as described in [App Service documentation](https://learn.microsoft.com/azure/app-service/overview-managed-identity).</li><li>Verify the App Service environment is properly configured and the managed identity endpoint is available. See [below](#verify-the-app-service-managed-identity-endpoint-is-available) for instructions.</li></ul>|
+
+#### Verify the App Service managed identity endpoint is available
+
+If you can SSH into the App Service, you can verify managed identity is available in the environment. First ensure the environment variables `IDENTITY_ENDPOINT` and `IDENTITY_SECRET` are set. Then you can verify the managed identity endpoint is available using `curl`.
+
+```sh
+curl "$IDENTITY_ENDPOINT?resource=https://management.core.windows.net&api-version=2019-08-01" -H "X-IDENTITY-HEADER: $IDENTITY_HEADER"
+```
+
+> This command's output will contain an access token and SHOULD NOT BE SHARED, to avoid compromising account security.
+
+### Azure Kubernetes Service managed identity
+
+#### Pod Identity
+
+| Error Message |Description| Mitigation |
+|---|---|---|
+|"no azure identity found for request clientID"|The application attempted to authenticate before an identity was assigned to its pod|Verify the pod is labeled correctly. This also occurs when a correctly labeled pod authenticates before the identity is ready. To prevent initialization races, configure NMI to set the Retry-After header in its responses as described in [Pod Identity documentation](https://azure.github.io/aad-pod-identity/docs/configure/feature_flags/#set-retry-after-header-in-nmi-response).
+
+<a id="azure-cli"></a>
+## Troubleshoot AzureCLICredential authentication issues
+
+| Error Message |Description| Mitigation |
+|---|---|---|
+|Azure CLI not found on path|The Azure CLI isn’t installed or isn't on the application's path.|<ul><li>Ensure the Azure CLI is installed as described in [Azure CLI documentation](https://learn.microsoft.com/cli/azure/install-azure-cli).</li><li>Validate the installation location is in the application's `PATH` environment variable.</li></ul>|
+|Please run 'az login' to set up account|No account is currently logged into the Azure CLI, or the login has expired.|<ul><li>Run `az login` to log into the Azure CLI. More information about Azure CLI authentication is available in the [Azure CLI documentation](https://learn.microsoft.com/cli/azure/authenticate-azure-cli).</li><li>Verify that the Azure CLI can obtain tokens. See [below](#verify-the-azure-cli-can-obtain-tokens) for instructions.</li></ul>|
+
+#### Verify the Azure CLI can obtain tokens
+
+You can manually verify that the Azure CLI can authenticate and obtain tokens. First, use the `account` command to verify the logged in account.
+
+```azurecli
+az account show
+```
+
+Once you've verified the Azure CLI is using the correct account, you can validate that it's able to obtain tokens for that account.
+
+```azurecli
+az account get-access-token --output json --resource https://management.core.windows.net
+```
+
+> This command's output will contain an access token and SHOULD NOT BE SHARED, to avoid compromising account security.
+
+<a id="azd"></a>
+## Troubleshoot AzureDeveloperCLICredential authentication issues
+
+| Error Message |Description| Mitigation |
+|---|---|---|
+|Azure Developer CLI not found on path|The Azure Developer CLI isn't installed or couldn't be found.|<ul><li>Ensure the Azure Developer CLI is properly installed. See the installation instructions at [Install or update the Azure Developer CLI](https://learn.microsoft.com/azure/developer/azure-developer-cli/install-azd).</li><li>Validate the installation location has been added to the `PATH` environment variable.</li></ul>|
+|Please run "azd auth login"|No account is logged into the Azure Developer CLI, or the login has expired.|<ul><li>Log in to the Azure Developer CLI using the `azd login` command.</li><li>Validate that the Azure Developer CLI can obtain tokens. For instructions, see [Verify the Azure Developer CLI can obtain tokens](#verify-the-azure-developer-cli-can-obtain-tokens).</li></ul>|
+
+#### Verify the Azure Developer CLI can obtain tokens
+
+You can manually verify that the Azure Developer CLI is properly authenticated and can obtain tokens. First, use the `config` command to verify the account that is currently logged in to the Azure Developer CLI.
+
+```sh
+azd config list
+```
+
+Once you've verified the Azure Developer CLI is using the correct account, you can validate that it's able to obtain tokens for this account.
+
+```sh
+azd auth token --output json --scope https://management.core.windows.net/.default
+```
+>Note that output of this command will contain a valid access token, and SHOULD NOT BE SHARED to avoid compromising account security.
+
+<a id="workload"></a>
+## Troubleshoot `WorkloadIdentityCredential` authentication issues
+
+| Error Message |Description| Mitigation |
+|---|---|---|
+|no client ID/tenant ID/token file specified|Incomplete configuration|In most cases these values are provided via environment variables set by Azure Workload Identity.<ul><li>If your application runs on Azure Kubernetes Service (AKS) or a cluster that has deployed the Azure Workload Identity admission webhook, check pod labels and service account configuration. See the [AKS documentation](https://learn.microsoft.com/azure/aks/workload-identity-deploy-cluster#disable-workload-identity) and [Azure Workload Identity troubleshooting guide](https://azure.github.io/azure-workload-identity/docs/troubleshooting.html) for more details.</li><li>If your application isn't running on AKS or your cluster hasn't deployed the Workload Identity admission webhook, set these values in `WorkloadIdentityCredentialOptions`.</li></ul>|
+
+<a id="apc"></a>
+## Troubleshoot AzurePipelinesCredential authentication issues
+
+| Error Message |Description| Mitigation |
+|---|---|---|
+| AADSTS900023: Specified tenant identifier 'some tenant ID' is neither a valid DNS name, nor a valid external domain.|The `tenantID` argument to `NewAzurePipelinesCredential` is incorrect| Verify the tenant ID. It must identify the tenant of the user-assigned managed identity or service principal configured for the service connection.|
+| No service connection found with identifier |The `serviceConnectionID` argument to `NewAzurePipelinesCredential` is incorrect| Verify the service connection ID. This parameter refers to the `resourceId` of the Azure Service Connection. It can also be found in the query string of the service connection's configuration in Azure DevOps. [Azure Pipelines documentation](https://learn.microsoft.com/azure/devops/pipelines/library/service-endpoints?view=azure-devops&tabs=yaml) has more information about service connections.|
+|302 (Found) response from OIDC endpoint|The `systemAccessToken` argument to `NewAzurePipelinesCredential` is incorrect|Check pipeline configuration. This value comes from the predefined variable `System.AccessToken` [as described in Azure Pipelines documentation](https://learn.microsoft.com/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken).|
+
+## Get additional help
+
+Additional information on ways to reach out for support can be found in [SUPPORT.md](https://github.com/Azure/azure-sdk-for-go/blob/main/SUPPORT.md).
@@ -0,0 +1,6 @@
+{
+ "AssetsRepo": "Azure/azure-sdk-assets",
+ "AssetsRepoPrefixPath": "go",
+ "TagPrefix": "go/azidentity",
+ "Tag": "go/azidentity_087379b475"
+}
@@ -0,0 +1,95 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/url"
+ "strings"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/public"
+)
+
+// supportedAuthRecordVersions lists the authenticationRecord schema versions this module can deserialize.
+var supportedAuthRecordVersions = []string{"1.0"}
+
+// authenticationRecord is non-secret account information about an authenticated user that user credentials such as
+// [DeviceCodeCredential] and [InteractiveBrowserCredential] can use to access previously cached authentication
+// data. Call these credentials' Authenticate method to get an authenticationRecord for a user.
+type authenticationRecord struct {
+	// Authority is the URL of the authority that issued the token.
+	Authority string `json:"authority"`
+
+	// ClientID is the ID of the application that authenticated the user.
+	ClientID string `json:"clientId"`
+
+	// HomeAccountID uniquely identifies the account.
+	HomeAccountID string `json:"homeAccountId"`
+
+	// TenantID identifies the tenant in which the user authenticated.
+	TenantID string `json:"tenantId"`
+
+	// Username is the user's preferred username.
+	Username string `json:"username"`
+
+	// Version of the AuthenticationRecord.
+	Version string `json:"version"`
+}
+
+// UnmarshalJSON implements json.Unmarshaler for AuthenticationRecord
+func (a *authenticationRecord) UnmarshalJSON(b []byte) error {
+ // Default unmarshaling is fine but we want to return an error if the record's version isn't supported i.e., we
+ // want to inspect the unmarshalled values before deciding whether to return an error. Unmarshaling a formally
+ // different type enables this by assigning all the fields without recursing into this method.
+ type r authenticationRecord
+ err := json.Unmarshal(b, (*r)(a))
+ if err != nil {
+ return err
+ }
+ if a.Version == "" {
+ return errors.New("AuthenticationRecord must have a version")
+ }
+ for _, v := range supportedAuthRecordVersions {
+ if a.Version == v {
+ return nil
+ }
+ }
+ return fmt.Errorf("unsupported AuthenticationRecord version %q. This module supports %v", a.Version, supportedAuthRecordVersions)
+}
+
+// account returns the AuthenticationRecord as an MSAL Account. The account is zero-valued when the AuthenticationRecord is zero-valued.
+func (a *authenticationRecord) account() public.Account {
+	return public.Account{
+		Environment:       a.Authority,
+		HomeAccountID:     a.HomeAccountID,
+		PreferredUsername: a.Username,
+	}
+}
+
+// newAuthenticationRecord maps an MSAL authentication result to an authenticationRecord.
+// The authority is derived from the ID token's issuer URL. The tenant falls back to the
+// issuer URL's path when the token carries no tenant ID, and the username falls back to
+// the UPN claim when there is no preferred username.
+func newAuthenticationRecord(ar public.AuthResult) (authenticationRecord, error) {
+	u, err := url.Parse(ar.IDToken.Issuer)
+	if err != nil {
+		return authenticationRecord{}, fmt.Errorf("Authenticate expected a URL issuer but got %q", ar.IDToken.Issuer)
+	}
+	tenant := ar.IDToken.TenantID
+	if tenant == "" {
+		// assumes issuer URLs look like https://host/{tenant}/..., i.e. the path names the tenant
+		tenant = strings.Trim(u.Path, "/")
+	}
+	username := ar.IDToken.PreferredUsername
+	if username == "" {
+		username = ar.IDToken.UPN
+	}
+	return authenticationRecord{
+		Authority:     fmt.Sprintf("%s://%s", u.Scheme, u.Host),
+		ClientID:      ar.IDToken.Audience,
+		HomeAccountID: ar.Account.HomeAccountID,
+		TenantID:      tenant,
+		Username:      username,
+		Version:       "1.0",
+	}, nil
+}
@@ -0,0 +1,190 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "bytes"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "os"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/streaming"
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity/internal"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/public"
+)
+
+const (
+	// names of environment variables read by credentials in this module
+	azureAdditionallyAllowedTenants = "AZURE_ADDITIONALLY_ALLOWED_TENANTS"
+	azureAuthorityHost              = "AZURE_AUTHORITY_HOST"
+	azureClientCertificatePassword  = "AZURE_CLIENT_CERTIFICATE_PASSWORD"
+	azureClientCertificatePath      = "AZURE_CLIENT_CERTIFICATE_PATH"
+	azureClientID                   = "AZURE_CLIENT_ID"
+	azureClientSecret               = "AZURE_CLIENT_SECRET"
+	azureFederatedTokenFile         = "AZURE_FEDERATED_TOKEN_FILE"
+	azurePassword                   = "AZURE_PASSWORD"
+	azureRegionalAuthorityName      = "AZURE_REGIONAL_AUTHORITY_NAME"
+	azureTenantID                   = "AZURE_TENANT_ID"
+	azureUsername                   = "AZURE_USERNAME"
+
+	organizationsTenantID = "organizations"
+	// NOTE(review): presumably a well-known first-party client ID used as a default for developer sign-on credentials — confirm against usage
+	developerSignOnClientID = "04b07795-8ddb-461a-bbee-02f9e1bf7b46"
+	// trimming defaultSuffix converts a Microsoft Entra v2 scope to a v1 resource (see defaultAzTokenProvider)
+	defaultSuffix = "/.default"
+
+	traceNamespace      = "Microsoft.Entra"
+	traceOpGetToken     = "GetToken"
+	traceOpAuthenticate = "Authenticate"
+)
+
+var (
+	// capability CP1 indicates the client application is capable of handling CAE claims challenges
+	cp1 = []string{"CP1"}
+	// errInvalidTenantID is returned whenever a caller-supplied tenant ID fails validTenantID
+	errInvalidTenantID = errors.New("invalid tenantID. You can locate your tenantID by following the instructions listed here: https://learn.microsoft.com/partner-center/find-ids-and-domain-names")
+)
+
+// tokenCachePersistenceOptions contains options for persistent token caching
+type tokenCachePersistenceOptions = internal.TokenCachePersistenceOptions
+
+// setAuthorityHost initializes the authority host for credentials. Precedence is:
+// 1. cloud.Configuration.ActiveDirectoryAuthorityHost value set by user
+// 2. value of AZURE_AUTHORITY_HOST
+// 3. default: Azure Public Cloud
+func setAuthorityHost(cc cloud.Configuration) (string, error) {
+	host := cc.ActiveDirectoryAuthorityHost
+	if host == "" {
+		if len(cc.Services) > 0 {
+			// a custom cloud configuration (nonzero Services) must specify its own authority host
+			return "", errors.New("missing ActiveDirectoryAuthorityHost for specified cloud")
+		}
+		host = cloud.AzurePublic.ActiveDirectoryAuthorityHost
+		if envAuthorityHost := os.Getenv(azureAuthorityHost); envAuthorityHost != "" {
+			host = envAuthorityHost
+		}
+	}
+	u, err := url.Parse(host)
+	if err != nil {
+		return "", err
+	}
+	if u.Scheme != "https" {
+		// require TLS because credentials send secrets and receive tokens from this host
+		return "", errors.New("cannot use an authority host without https")
+	}
+	return host, nil
+}
+
// resolveAdditionalTenants returns a copy of tenants, simplified when tenants contains a wildcard
func resolveAdditionalTenants(tenants []string) []string {
	if len(tenants) == 0 {
		return nil
	}
	for _, tenant := range tenants {
		if tenant == "*" {
			// the wildcard covers every tenant, so any other entries are redundant
			return []string{"*"}
		}
	}
	// return a defensive copy so later mutation of the caller's slice has no effect
	return append(make([]string, 0, len(tenants)), tenants...)
}
+
+// resolveTenant returns the correct tenant for a token request, given the credential's
+// configured defaultTenant, the tenant specified for this request (may be empty), the
+// credential's name (used in error messages), and its additionally allowed tenants.
+// A non-default tenant must be valid and explicitly allowed, or covered by the "*" wildcard.
+func resolveTenant(defaultTenant, specified, credName string, additionalTenants []string) (string, error) {
+	if specified == "" || specified == defaultTenant {
+		return defaultTenant, nil
+	}
+	if defaultTenant == "adfs" {
+		return "", errors.New("ADFS doesn't support tenants")
+	}
+	if !validTenantID(specified) {
+		return "", errInvalidTenantID
+	}
+	for _, t := range additionalTenants {
+		if t == "*" || t == specified {
+			return specified, nil
+		}
+	}
+	return "", fmt.Errorf(`%s isn't configured to acquire tokens for tenant %q. To enable acquiring tokens for this tenant add it to the AdditionallyAllowedTenants on the credential options, or add "*" to allow acquiring tokens for any tenant`, credName, specified)
+}
+
// alphanumeric reports whether r is an ASCII letter or digit.
func alphanumeric(r rune) bool {
	switch {
	case r >= '0' && r <= '9', r >= 'a' && r <= 'z', r >= 'A' && r <= 'Z':
		return true
	default:
		return false
	}
}

// validTenantID reports whether tenantID is non-empty and contains only
// ASCII letters, digits, '.' and '-'.
func validTenantID(tenantID string) bool {
	if tenantID == "" {
		return false
	}
	for _, r := range tenantID {
		switch {
		case alphanumeric(r), r == '.', r == '-':
			// allowed character
		default:
			return false
		}
	}
	return true
}
+
+// doForClient sends r through client's pipeline and returns the response. It adapts r to a
+// runtime request, making the body rewindable (so pipeline policies can replay it) and copying
+// headers. Headers the pipeline request already set take precedence over r's.
+func doForClient(client *azcore.Client, r *http.Request) (*http.Response, error) {
+	req, err := runtime.NewRequest(r.Context(), r.Method, r.URL.String())
+	if err != nil {
+		return nil, err
+	}
+	if r.Body != nil && r.Body != http.NoBody {
+		// create a rewindable body from the existing body as required
+		var body io.ReadSeekCloser
+		if rsc, ok := r.Body.(io.ReadSeekCloser); ok {
+			body = rsc
+		} else {
+			// the body isn't seekable, so buffer it in memory
+			b, err := io.ReadAll(r.Body)
+			if err != nil {
+				return nil, err
+			}
+			body = streaming.NopCloser(bytes.NewReader(b))
+		}
+		err = req.SetBody(body, r.Header.Get("Content-Type"))
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	// copy headers to the new request, ignoring any for which the new request has a value
+	h := req.Raw().Header
+	for key, vals := range r.Header {
+		if _, has := h[key]; !has {
+			for _, val := range vals {
+				h.Add(key, val)
+			}
+		}
+	}
+
+	resp, err := client.Pipeline().Do(req)
+	if err != nil {
+		return nil, err
+	}
+	// err is nil here
+	return resp, err
+}
+
+// msalConfidentialClient is the subset of the MSAL confidential client API this module uses.
+// It enables fakes for test scenarios.
+type msalConfidentialClient interface {
+	AcquireTokenSilent(ctx context.Context, scopes []string, options ...confidential.AcquireSilentOption) (confidential.AuthResult, error)
+	AcquireTokenByAuthCode(ctx context.Context, code string, redirectURI string, scopes []string, options ...confidential.AcquireByAuthCodeOption) (confidential.AuthResult, error)
+	AcquireTokenByCredential(ctx context.Context, scopes []string, options ...confidential.AcquireByCredentialOption) (confidential.AuthResult, error)
+	AcquireTokenOnBehalfOf(ctx context.Context, userAssertion string, scopes []string, options ...confidential.AcquireOnBehalfOfOption) (confidential.AuthResult, error)
+}
+
+// msalPublicClient is the subset of the MSAL public client API this module uses.
+// It enables fakes for test scenarios.
+type msalPublicClient interface {
+	AcquireTokenSilent(ctx context.Context, scopes []string, options ...public.AcquireSilentOption) (public.AuthResult, error)
+	AcquireTokenByUsernamePassword(ctx context.Context, scopes []string, username string, password string, options ...public.AcquireByUsernamePasswordOption) (public.AuthResult, error)
+	AcquireTokenByDeviceCode(ctx context.Context, scopes []string, options ...public.AcquireByDeviceCodeOption) (public.DeviceCode, error)
+	AcquireTokenByAuthCode(ctx context.Context, code string, redirectURI string, scopes []string, options ...public.AcquireByAuthCodeOption) (public.AuthResult, error)
+	AcquireTokenInteractive(ctx context.Context, scopes []string, options ...public.AcquireInteractiveOption) (public.AuthResult, error)
+}
@@ -0,0 +1,190 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "os"
+ "os/exec"
+ "runtime"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+)
+
+const credNameAzureCLI = "AzureCLICredential"
+
+// azTokenProvider is the function signature used to invoke the Azure CLI for a token;
+// tests substitute a fake implementation to avoid executing az.
+type azTokenProvider func(ctx context.Context, scopes []string, tenant, subscription string) ([]byte, error)
+
+// AzureCLICredentialOptions contains optional parameters for AzureCLICredential.
+type AzureCLICredentialOptions struct {
+	// AdditionallyAllowedTenants specifies tenants for which the credential may acquire tokens, in addition
+	// to TenantID. Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the
+	// logged in account can access.
+	AdditionallyAllowedTenants []string
+
+	// Subscription is the name or ID of a subscription. Set this to acquire tokens for an account other
+	// than the Azure CLI's current account.
+	Subscription string
+
+	// TenantID identifies the tenant the credential should authenticate in.
+	// Defaults to the CLI's default tenant, which is typically the home tenant of the logged in user.
+	TenantID string
+
+	// inDefaultChain is true when the credential is part of DefaultAzureCredential
+	inDefaultChain bool
+	// tokenProvider is used by tests to fake invoking az
+	tokenProvider azTokenProvider
+}
+
+// init returns an instance of AzureCLICredentialOptions initialized with default values.
+func (o *AzureCLICredentialOptions) init() {
+	if o.tokenProvider == nil {
+		o.tokenProvider = defaultAzTokenProvider
+	}
+}
+
+// AzureCLICredential authenticates as the identity logged in to the Azure CLI.
+type AzureCLICredential struct {
+	// mu serializes CLI invocations (see GetToken)
+	mu   *sync.Mutex
+	opts AzureCLICredentialOptions
+}
+
+// NewAzureCLICredential constructs an AzureCLICredential. Pass nil to accept default options.
+func NewAzureCLICredential(options *AzureCLICredentialOptions) (*AzureCLICredential, error) {
+	cp := AzureCLICredentialOptions{}
+	if options != nil {
+		cp = *options
+	}
+	// restrict Subscription to benign characters because it's interpolated into a
+	// command line by defaultAzTokenProvider
+	for _, r := range cp.Subscription {
+		if !(alphanumeric(r) || r == '-' || r == '_' || r == ' ' || r == '.') {
+			return nil, fmt.Errorf("%s: invalid Subscription %q", credNameAzureCLI, cp.Subscription)
+		}
+	}
+	if cp.TenantID != "" && !validTenantID(cp.TenantID) {
+		return nil, errInvalidTenantID
+	}
+	cp.init()
+	cp.AdditionallyAllowedTenants = resolveAdditionalTenants(cp.AdditionallyAllowedTenants)
+	return &AzureCLICredential{mu: &sync.Mutex{}, opts: cp}, nil
+}
+
+// GetToken requests a token from the Azure CLI. This credential doesn't cache tokens, so every call invokes the CLI.
+// This method is called automatically by Azure SDK clients.
+func (c *AzureCLICredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	at := azcore.AccessToken{}
+	if len(opts.Scopes) != 1 {
+		return at, errors.New(credNameAzureCLI + ": GetToken() requires exactly one scope")
+	}
+	if !validScope(opts.Scopes[0]) {
+		return at, fmt.Errorf("%s.GetToken(): invalid scope %q", credNameAzureCLI, opts.Scopes[0])
+	}
+	tenant, err := resolveTenant(c.opts.TenantID, opts.TenantID, credNameAzureCLI, c.opts.AdditionallyAllowedTenants)
+	if err != nil {
+		return at, err
+	}
+	// allow only one CLI invocation at a time
+	c.mu.Lock()
+	defer c.mu.Unlock()
+	b, err := c.opts.tokenProvider(ctx, opts.Scopes, tenant, c.opts.Subscription)
+	if err == nil {
+		at, err = c.createAccessToken(b)
+	}
+	if err != nil {
+		// NOTE(review): unavailableIfInChain presumably converts the error to a
+		// "credential unavailable" error when running in DefaultAzureCredential — confirm
+		err = unavailableIfInChain(err, c.opts.inDefaultChain)
+		return at, err
+	}
+	msg := fmt.Sprintf("%s.GetToken() acquired a token for scope %q", credNameAzureCLI, strings.Join(opts.Scopes, ", "))
+	log.Write(EventAuthentication, msg)
+	return at, nil
+}
+
+// defaultAzTokenProvider invokes the Azure CLI to acquire a token. It assumes
+// callers have verified that all string arguments are safe to pass to the CLI.
+var defaultAzTokenProvider azTokenProvider = func(ctx context.Context, scopes []string, tenantID, subscription string) ([]byte, error) {
+	// pass the CLI a Microsoft Entra ID v1 resource because we don't know which CLI version is installed and older ones don't support v2 scopes
+	resource := strings.TrimSuffix(scopes[0], defaultSuffix)
+	// set a default timeout for this authentication iff the application hasn't done so already
+	var cancel context.CancelFunc
+	if _, hasDeadline := ctx.Deadline(); !hasDeadline {
+		ctx, cancel = context.WithTimeout(ctx, cliTimeout)
+		defer cancel()
+	}
+	commandLine := "az account get-access-token -o json --resource " + resource
+	if tenantID != "" {
+		commandLine += " --tenant " + tenantID
+	}
+	if subscription != "" {
+		// subscription needs quotes because it may contain spaces
+		commandLine += ` --subscription "` + subscription + `"`
+	}
+	var cliCmd *exec.Cmd
+	if runtime.GOOS == "windows" {
+		dir := os.Getenv("SYSTEMROOT")
+		if dir == "" {
+			return nil, newCredentialUnavailableError(credNameAzureCLI, "environment variable 'SYSTEMROOT' has no value")
+		}
+		cliCmd = exec.CommandContext(ctx, "cmd.exe", "/c", commandLine)
+		cliCmd.Dir = dir
+	} else {
+		cliCmd = exec.CommandContext(ctx, "/bin/sh", "-c", commandLine)
+		cliCmd.Dir = "/bin"
+	}
+	cliCmd.Env = os.Environ()
+	var stderr bytes.Buffer
+	cliCmd.Stderr = &stderr
+
+	output, err := cliCmd.Output()
+	if err != nil {
+		msg := stderr.String()
+		var exErr *exec.ExitError
+		// exit code 127 (command not found) on Unix; cmd.exe prints "'az' is not recognized" on Windows
+		if errors.As(err, &exErr) && exErr.ExitCode() == 127 || strings.HasPrefix(msg, "'az' is not recognized") {
+			msg = "Azure CLI not found on path"
+		}
+		if msg == "" {
+			msg = err.Error()
+		}
+		return nil, newCredentialUnavailableError(credNameAzureCLI, msg)
+	}
+
+	return output, nil
+}
+
+func (c *AzureCLICredential) createAccessToken(tk []byte) (azcore.AccessToken, error) {
+ t := struct {
+ AccessToken string `json:"accessToken"`
+ Expires_On int64 `json:"expires_on"`
+ ExpiresOn string `json:"expiresOn"`
+ }{}
+ err := json.Unmarshal(tk, &t)
+ if err != nil {
+ return azcore.AccessToken{}, err
+ }
+
+ exp := time.Unix(t.Expires_On, 0)
+ if t.Expires_On == 0 {
+ exp, err = time.ParseInLocation("2006-01-02 15:04:05.999999", t.ExpiresOn, time.Local)
+ if err != nil {
+ return azcore.AccessToken{}, fmt.Errorf("%s: error parsing token expiration time %q: %v", credNameAzureCLI, t.ExpiresOn, err)
+ }
+ }
+
+ converted := azcore.AccessToken{
+ Token: t.AccessToken,
+ ExpiresOn: exp.UTC(),
+ }
+ return converted, nil
+}
+
+var _ azcore.TokenCredential = (*AzureCLICredential)(nil)
@@ -0,0 +1,169 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "os"
+ "os/exec"
+ "runtime"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+)
+
+const credNameAzureDeveloperCLI = "AzureDeveloperCLICredential"
+
+// azdTokenProvider is the function signature used to invoke the Azure Developer CLI for a
+// token; tests substitute a fake implementation to avoid executing azd.
+type azdTokenProvider func(ctx context.Context, scopes []string, tenant string) ([]byte, error)
+
+// AzureDeveloperCLICredentialOptions contains optional parameters for AzureDeveloperCLICredential.
+type AzureDeveloperCLICredentialOptions struct {
+	// AdditionallyAllowedTenants specifies tenants for which the credential may acquire tokens, in addition
+	// to TenantID. Add the wildcard value "*" to allow the credential to acquire tokens for any tenant the
+	// logged in account can access.
+	AdditionallyAllowedTenants []string
+
+	// TenantID identifies the tenant the credential should authenticate in. Defaults to the azd environment,
+	// which is the tenant of the selected Azure subscription.
+	TenantID string
+
+	// inDefaultChain is true when the credential is part of DefaultAzureCredential
+	inDefaultChain bool
+	// tokenProvider is used by tests to fake invoking azd
+	tokenProvider azdTokenProvider
+}
+
+// AzureDeveloperCLICredential authenticates as the identity logged in to the [Azure Developer CLI].
+//
+// [Azure Developer CLI]: https://learn.microsoft.com/azure/developer/azure-developer-cli/overview
+type AzureDeveloperCLICredential struct {
+	// mu serializes CLI invocations (see GetToken)
+	mu   *sync.Mutex
+	opts AzureDeveloperCLICredentialOptions
+}
+
+// NewAzureDeveloperCLICredential constructs an AzureDeveloperCLICredential. Pass nil to accept default options.
+func NewAzureDeveloperCLICredential(options *AzureDeveloperCLICredentialOptions) (*AzureDeveloperCLICredential, error) {
+	cp := AzureDeveloperCLICredentialOptions{}
+	if options != nil {
+		cp = *options
+	}
+	if cp.TenantID != "" && !validTenantID(cp.TenantID) {
+		return nil, errInvalidTenantID
+	}
+	if cp.tokenProvider == nil {
+		cp.tokenProvider = defaultAzdTokenProvider
+	}
+	return &AzureDeveloperCLICredential{mu: &sync.Mutex{}, opts: cp}, nil
+}
+
+// GetToken requests a token from the Azure Developer CLI. This credential doesn't cache tokens, so every call invokes azd.
+// This method is called automatically by Azure SDK clients.
+func (c *AzureDeveloperCLICredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	at := azcore.AccessToken{}
+	if len(opts.Scopes) == 0 {
+		return at, errors.New(credNameAzureDeveloperCLI + ": GetToken() requires at least one scope")
+	}
+	for _, scope := range opts.Scopes {
+		if !validScope(scope) {
+			return at, fmt.Errorf("%s.GetToken(): invalid scope %q", credNameAzureDeveloperCLI, scope)
+		}
+	}
+	tenant, err := resolveTenant(c.opts.TenantID, opts.TenantID, credNameAzureDeveloperCLI, c.opts.AdditionallyAllowedTenants)
+	if err != nil {
+		return at, err
+	}
+	// allow only one CLI invocation at a time
+	c.mu.Lock()
+	defer c.mu.Unlock()
+	b, err := c.opts.tokenProvider(ctx, opts.Scopes, tenant)
+	if err == nil {
+		at, err = c.createAccessToken(b)
+	}
+	if err != nil {
+		// NOTE(review): unavailableIfInChain presumably converts the error to a
+		// "credential unavailable" error when running in DefaultAzureCredential — confirm
+		err = unavailableIfInChain(err, c.opts.inDefaultChain)
+		return at, err
+	}
+	msg := fmt.Sprintf("%s.GetToken() acquired a token for scope %q", credNameAzureDeveloperCLI, strings.Join(opts.Scopes, ", "))
+	log.Write(EventAuthentication, msg)
+	return at, nil
+}
+
+// defaultAzdTokenProvider invokes the Azure Developer CLI to acquire a token. It assumes
+// callers have verified that all string arguments are safe to pass to the CLI.
+var defaultAzdTokenProvider azdTokenProvider = func(ctx context.Context, scopes []string, tenant string) ([]byte, error) {
+	// set a default timeout for this authentication iff the application hasn't done so already
+	var cancel context.CancelFunc
+	if _, hasDeadline := ctx.Deadline(); !hasDeadline {
+		ctx, cancel = context.WithTimeout(ctx, cliTimeout)
+		defer cancel()
+	}
+	commandLine := "azd auth token -o json"
+	if tenant != "" {
+		commandLine += " --tenant-id " + tenant
+	}
+	for _, scope := range scopes {
+		commandLine += " --scope " + scope
+	}
+	var cliCmd *exec.Cmd
+	if runtime.GOOS == "windows" {
+		dir := os.Getenv("SYSTEMROOT")
+		if dir == "" {
+			return nil, newCredentialUnavailableError(credNameAzureDeveloperCLI, "environment variable 'SYSTEMROOT' has no value")
+		}
+		cliCmd = exec.CommandContext(ctx, "cmd.exe", "/c", commandLine)
+		cliCmd.Dir = dir
+	} else {
+		cliCmd = exec.CommandContext(ctx, "/bin/sh", "-c", commandLine)
+		cliCmd.Dir = "/bin"
+	}
+	cliCmd.Env = os.Environ()
+	var stderr bytes.Buffer
+	cliCmd.Stderr = &stderr
+	output, err := cliCmd.Output()
+	if err != nil {
+		msg := stderr.String()
+		var exErr *exec.ExitError
+		// exit code 127 (command not found) on Unix; cmd.exe prints "'azd' is not recognized" on Windows
+		if errors.As(err, &exErr) && exErr.ExitCode() == 127 || strings.HasPrefix(msg, "'azd' is not recognized") {
+			msg = "Azure Developer CLI not found on path"
+		} else if strings.Contains(msg, "azd auth login") {
+			msg = `please run "azd auth login" from a command prompt to authenticate before using this credential`
+		}
+		if msg == "" {
+			msg = err.Error()
+		}
+		return nil, newCredentialUnavailableError(credNameAzureDeveloperCLI, msg)
+	}
+	return output, nil
+}
+
+func (c *AzureDeveloperCLICredential) createAccessToken(tk []byte) (azcore.AccessToken, error) {
+ t := struct {
+ AccessToken string `json:"token"`
+ ExpiresOn string `json:"expiresOn"`
+ }{}
+ err := json.Unmarshal(tk, &t)
+ if err != nil {
+ return azcore.AccessToken{}, err
+ }
+ exp, err := time.Parse("2006-01-02T15:04:05Z", t.ExpiresOn)
+ if err != nil {
+ return azcore.AccessToken{}, fmt.Errorf("error parsing token expiration time %q: %v", t.ExpiresOn, err)
+ }
+ return azcore.AccessToken{
+ ExpiresOn: exp.UTC(),
+ Token: t.AccessToken,
+ }, nil
+}
+
+var _ azcore.TokenCredential = (*AzureDeveloperCLICredential)(nil)
@@ -0,0 +1,140 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "os"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+)
+
const (
	credNameAzurePipelines = "AzurePipelinesCredential"
	// oidcAPIVersion is the Azure DevOps REST API version used when requesting an OIDC token.
	oidcAPIVersion = "7.1"
	// systemOIDCRequestURI names the environment variable, set by Azure Pipelines,
	// containing the URL of the OIDC token endpoint for the running build.
	systemOIDCRequestURI = "SYSTEM_OIDCREQUESTURI"
)

// AzurePipelinesCredential authenticates with workload identity federation in an Azure Pipeline. See
// [Azure Pipelines documentation] for more information.
//
// [Azure Pipelines documentation]: https://learn.microsoft.com/azure/devops/pipelines/library/connect-to-azure?view=azure-devops#create-an-azure-resource-manager-service-connection-that-uses-workload-identity-federation
type AzurePipelinesCredential struct {
	// connectionID identifies the service connection; oidcURI is the pipeline's OIDC endpoint;
	// systemAccessToken authorizes requests to that endpoint.
	connectionID, oidcURI, systemAccessToken string
	// cred performs the actual token acquisition, calling getAssertion for client assertions.
	cred *ClientAssertionCredential
}

// AzurePipelinesCredentialOptions contains optional parameters for AzurePipelinesCredential.
type AzurePipelinesCredentialOptions struct {
	azcore.ClientOptions

	// AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire tokens.
	// Add the wildcard value "*" to allow the credential to acquire tokens for any tenant in which the
	// application is registered.
	AdditionallyAllowedTenants []string

	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
	// the application responsible for ensuring the configured authority is valid and trustworthy.
	DisableInstanceDiscovery bool
}
+
+// NewAzurePipelinesCredential is the constructor for AzurePipelinesCredential.
+//
+// - tenantID: tenant ID of the service principal federated with the service connection
+// - clientID: client ID of that service principal
+// - serviceConnectionID: ID of the service connection to authenticate
+// - systemAccessToken: security token for the running build. See [Azure Pipelines documentation] for
+// an example showing how to get this value.
+//
+// [Azure Pipelines documentation]: https://learn.microsoft.com/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken
+func NewAzurePipelinesCredential(tenantID, clientID, serviceConnectionID, systemAccessToken string, options *AzurePipelinesCredentialOptions) (*AzurePipelinesCredential, error) {
+ if !validTenantID(tenantID) {
+ return nil, errInvalidTenantID
+ }
+ if clientID == "" {
+ return nil, errors.New("no client ID specified")
+ }
+ if serviceConnectionID == "" {
+ return nil, errors.New("no service connection ID specified")
+ }
+ if systemAccessToken == "" {
+ return nil, errors.New("no system access token specified")
+ }
+ u := os.Getenv(systemOIDCRequestURI)
+ if u == "" {
+ return nil, fmt.Errorf("no value for environment variable %s. This should be set by Azure Pipelines", systemOIDCRequestURI)
+ }
+ a := AzurePipelinesCredential{
+ connectionID: serviceConnectionID,
+ oidcURI: u,
+ systemAccessToken: systemAccessToken,
+ }
+ if options == nil {
+ options = &AzurePipelinesCredentialOptions{}
+ }
+ caco := ClientAssertionCredentialOptions{
+ AdditionallyAllowedTenants: options.AdditionallyAllowedTenants,
+ ClientOptions: options.ClientOptions,
+ DisableInstanceDiscovery: options.DisableInstanceDiscovery,
+ }
+ cred, err := NewClientAssertionCredential(tenantID, clientID, a.getAssertion, &caco)
+ if err != nil {
+ return nil, err
+ }
+ cred.client.name = credNameAzurePipelines
+ a.cred = cred
+ return &a, nil
+}
+
// GetToken requests an access token from Microsoft Entra ID. Azure SDK clients call this method automatically.
func (a *AzurePipelinesCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
	// err is declared first so the deferred closure records the call's final outcome in the span
	var err error
	ctx, endSpan := runtime.StartSpan(ctx, credNameAzurePipelines+"."+traceOpGetToken, a.cred.client.azClient.Tracer(), nil)
	defer func() { endSpan(err) }()
	tk, err := a.cred.GetToken(ctx, opts)
	return tk, err
}
+
// getAssertion requests an OIDC token from the Azure Pipelines endpoint and returns it
// for use as a client assertion. It is the callback passed to ClientAssertionCredential.
func (a *AzurePipelinesCredential) getAssertion(ctx context.Context) (string, error) {
	url := a.oidcURI + "?api-version=" + oidcAPIVersion + "&serviceConnectionId=" + a.connectionID
	url, err := runtime.EncodeQueryParams(url)
	if err != nil {
		return "", newAuthenticationFailedError(credNameAzurePipelines, "couldn't encode OIDC URL: "+err.Error(), nil, nil)
	}
	req, err := http.NewRequestWithContext(ctx, http.MethodPost, url, nil)
	if err != nil {
		return "", newAuthenticationFailedError(credNameAzurePipelines, "couldn't create OIDC token request: "+err.Error(), nil, nil)
	}
	// the build's system access token authorizes this request
	req.Header.Set("Authorization", "Bearer "+a.systemAccessToken)
	res, err := doForClient(a.cred.client.azClient, req)
	if err != nil {
		return "", newAuthenticationFailedError(credNameAzurePipelines, "couldn't send OIDC token request: "+err.Error(), nil, nil)
	}
	if res.StatusCode != http.StatusOK {
		msg := res.Status + " response from the OIDC endpoint. Check service connection ID and Pipeline configuration"
		// include the response because its body, if any, probably contains an error message.
		// OK responses aren't included with errors because they probably contain secrets
		return "", newAuthenticationFailedError(credNameAzurePipelines, msg, res, nil)
	}
	b, err := runtime.Payload(res)
	if err != nil {
		return "", newAuthenticationFailedError(credNameAzurePipelines, "couldn't read OIDC response content: "+err.Error(), nil, nil)
	}
	var r struct {
		OIDCToken string `json:"oidcToken"`
	}
	err = json.Unmarshal(b, &r)
	if err != nil {
		return "", newAuthenticationFailedError(credNameAzurePipelines, "unexpected response from OIDC endpoint", nil, nil)
	}
	return r.OIDCToken, nil
}
@@ -0,0 +1,138 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+ "sync"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+)
+
// ChainedTokenCredentialOptions contains optional parameters for ChainedTokenCredential.
type ChainedTokenCredentialOptions struct {
	// RetrySources configures how the credential uses its sources. When true, the credential always attempts to
	// authenticate through each source in turn, stopping when one succeeds. When false, the credential authenticates
	// only through the first successful source--it never again tries the sources which failed.
	RetrySources bool
}

// ChainedTokenCredential links together multiple credentials and tries them sequentially when authenticating. By default,
// it tries all the credentials until one authenticates, after which it always uses that credential.
type ChainedTokenCredential struct {
	// cond serializes iteration over sources; iterating is true while one goroutine is trying them.
	cond      *sync.Cond
	iterating bool
	name      string
	// successfulCredential caches the first source that authenticated (unused when retrySources is true).
	retrySources         bool
	sources              []azcore.TokenCredential
	successfulCredential azcore.TokenCredential
}
+
+// NewChainedTokenCredential creates a ChainedTokenCredential. Pass nil for options to accept defaults.
+func NewChainedTokenCredential(sources []azcore.TokenCredential, options *ChainedTokenCredentialOptions) (*ChainedTokenCredential, error) {
+ if len(sources) == 0 {
+ return nil, errors.New("sources must contain at least one TokenCredential")
+ }
+ for _, source := range sources {
+ if source == nil { // cannot have a nil credential in the chain or else the application will panic when GetToken() is called on nil
+ return nil, errors.New("sources cannot contain nil")
+ }
+ }
+ cp := make([]azcore.TokenCredential, len(sources))
+ copy(cp, sources)
+ if options == nil {
+ options = &ChainedTokenCredentialOptions{}
+ }
+ return &ChainedTokenCredential{
+ cond: sync.NewCond(&sync.Mutex{}),
+ name: "ChainedTokenCredential",
+ retrySources: options.RetrySources,
+ sources: cp,
+ }, nil
+}
+
// GetToken calls GetToken on the chained credentials in turn, stopping when one returns a token.
// This method is called automatically by Azure SDK clients.
//
// When RetrySources is false, the first goroutine to call this method iterates the sources while
// any concurrent callers wait on the condition variable; once a source succeeds it is cached and
// used for all subsequent calls.
func (c *ChainedTokenCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
	if !c.retrySources {
		// ensure only one goroutine at a time iterates the sources and perhaps sets c.successfulCredential
		c.cond.L.Lock()
		for {
			if c.successfulCredential != nil {
				c.cond.L.Unlock()
				return c.successfulCredential.GetToken(ctx, opts)
			}
			if !c.iterating {
				c.iterating = true
				// allow other goroutines to wait while this one iterates
				c.cond.L.Unlock()
				break
			}
			c.cond.Wait()
		}
	}

	var (
		err                  error
		errs                 []error
		successfulCredential azcore.TokenCredential
		token                azcore.AccessToken
		unavailableErr       credentialUnavailable
	)
	for _, cred := range c.sources {
		token, err = cred.GetToken(ctx, opts)
		if err == nil {
			log.Writef(EventAuthentication, "%s authenticated with %s", c.name, extractCredentialName(cred))
			successfulCredential = cred
			break
		}
		errs = append(errs, err)
		// continue to the next source iff this one returned credentialUnavailableError
		if !errors.As(err, &unavailableErr) {
			break
		}
	}
	if c.iterating {
		c.cond.L.Lock()
		// this is nil when all credentials returned an error
		c.successfulCredential = successfulCredential
		c.iterating = false
		c.cond.L.Unlock()
		// wake all waiters so they can read successfulCredential (or iterate again if it's nil)
		c.cond.Broadcast()
	}
	// err is the error returned by the last GetToken call. It will be nil when that call succeeds
	if err != nil {
		// return credentialUnavailableError iff all sources did so; return AuthenticationFailedError otherwise
		msg := createChainedErrorMessage(errs)
		if errors.As(err, &unavailableErr) {
			err = newCredentialUnavailableError(c.name, msg)
		} else {
			res := getResponseFromError(err)
			err = newAuthenticationFailedError(c.name, msg, res, err)
		}
	}
	return token, err
}
+
+func createChainedErrorMessage(errs []error) string {
+ msg := "failed to acquire a token.\nAttempted credentials:"
+ for _, err := range errs {
+ msg += fmt.Sprintf("\n\t%s", err.Error())
+ }
+ return msg
+}
+
+func extractCredentialName(credential azcore.TokenCredential) string {
+ return strings.TrimPrefix(fmt.Sprintf("%T", credential), "*azidentity.")
+}
+
+var _ azcore.TokenCredential = (*ChainedTokenCredential)(nil)
@@ -0,0 +1,46 @@
+# NOTE: Please refer to https://aka.ms/azsdk/engsys/ci-yaml before editing this file.
+trigger:
+ branches:
+ include:
+ - main
+ - feature/*
+ - hotfix/*
+ - release/*
+ paths:
+ include:
+ - sdk/azidentity/
+
+pr:
+ branches:
+ include:
+ - main
+ - feature/*
+ - hotfix/*
+ - release/*
+ paths:
+ include:
+ - sdk/azidentity/
+
+extends:
+ template: /eng/pipelines/templates/jobs/archetype-sdk-client.yml
+ parameters:
+ CloudConfig:
+ Public:
+ SubscriptionConfigurations:
+ - $(sub-config-azure-cloud-test-resources)
+ - $(sub-config-identity-test-resources)
+ EnvVars:
+ SYSTEM_ACCESSTOKEN: $(System.AccessToken)
+ RunLiveTests: true
+ ServiceDirectory: azidentity
+ UsePipelineProxy: false
+
+ ${{ if endsWith(variables['Build.DefinitionName'], 'weekly') }}:
+ MatrixConfigs:
+ - Name: managed_identity_matrix
+ GenerateVMJobs: true
+ Path: sdk/azidentity/managed-identity-matrix.json
+ Selection: sparse
+ MatrixReplace:
+ - Pool=.*LINUXPOOL.*/azsdk-pool-mms-ubuntu-2204-identitymsi
+ - OSVmImage=.*LINUXNEXTVMIMAGE.*/azsdk-pool-mms-ubuntu-2204-1espt
@@ -0,0 +1,85 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "errors"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential"
+)
+
// credNameAssertion is used in error and trace messages for this credential type.
const credNameAssertion = "ClientAssertionCredential"

// ClientAssertionCredential authenticates an application with assertions provided by a callback function.
// This credential is for advanced scenarios. [ClientCertificateCredential] has a more convenient API for
// the most common assertion scenario, authenticating a service principal with a certificate. See
// [Microsoft Entra ID documentation] for details of the assertion format.
//
// [Microsoft Entra ID documentation]: https://learn.microsoft.com/entra/identity-platform/certificate-credentials#assertion-format
type ClientAssertionCredential struct {
	// client wraps the MSAL confidential client that acquires tokens.
	client *confidentialClient
}

// ClientAssertionCredentialOptions contains optional parameters for ClientAssertionCredential.
type ClientAssertionCredentialOptions struct {
	azcore.ClientOptions

	// AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire tokens.
	// Add the wildcard value "*" to allow the credential to acquire tokens for any tenant in which the
	// application is registered.
	AdditionallyAllowedTenants []string

	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
	// the application responsible for ensuring the configured authority is valid and trustworthy.
	DisableInstanceDiscovery bool

	// tokenCachePersistenceOptions enables persistent token caching when not nil.
	tokenCachePersistenceOptions *tokenCachePersistenceOptions
}
+
+// NewClientAssertionCredential constructs a ClientAssertionCredential. The getAssertion function must be thread safe. Pass nil for options to accept defaults.
+func NewClientAssertionCredential(tenantID, clientID string, getAssertion func(context.Context) (string, error), options *ClientAssertionCredentialOptions) (*ClientAssertionCredential, error) {
+ if getAssertion == nil {
+ return nil, errors.New("getAssertion must be a function that returns assertions")
+ }
+ if options == nil {
+ options = &ClientAssertionCredentialOptions{}
+ }
+ cred := confidential.NewCredFromAssertionCallback(
+ func(ctx context.Context, _ confidential.AssertionRequestOptions) (string, error) {
+ return getAssertion(ctx)
+ },
+ )
+ msalOpts := confidentialClientOptions{
+ AdditionallyAllowedTenants: options.AdditionallyAllowedTenants,
+ ClientOptions: options.ClientOptions,
+ DisableInstanceDiscovery: options.DisableInstanceDiscovery,
+ tokenCachePersistenceOptions: options.tokenCachePersistenceOptions,
+ }
+ c, err := newConfidentialClient(tenantID, clientID, credNameAssertion, cred, msalOpts)
+ if err != nil {
+ return nil, err
+ }
+ return &ClientAssertionCredential{client: c}, nil
+}
+
// GetToken requests an access token from Microsoft Entra ID. This method is called automatically by Azure SDK clients.
func (c *ClientAssertionCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
	// err is declared first so the deferred closure records the call's final outcome in the span
	var err error
	ctx, endSpan := runtime.StartSpan(ctx, credNameAssertion+"."+traceOpGetToken, c.client.azClient.Tracer(), nil)
	defer func() { endSpan(err) }()
	tk, err := c.client.GetToken(ctx, opts)
	return tk, err
}

var _ azcore.TokenCredential = (*ClientAssertionCredential)(nil)
@@ -0,0 +1,174 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "crypto"
+ "crypto/x509"
+ "encoding/pem"
+ "errors"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential"
+ "golang.org/x/crypto/pkcs12"
+)
+
// credNameCert is used in error and trace messages for this credential type.
const credNameCert = "ClientCertificateCredential"

// ClientCertificateCredentialOptions contains optional parameters for ClientCertificateCredential.
type ClientCertificateCredentialOptions struct {
	azcore.ClientOptions

	// AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire tokens.
	// Add the wildcard value "*" to allow the credential to acquire tokens for any tenant in which the
	// application is registered.
	AdditionallyAllowedTenants []string

	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
	// the application responsible for ensuring the configured authority is valid and trustworthy.
	DisableInstanceDiscovery bool

	// SendCertificateChain controls whether the credential sends the public certificate chain in the x5c
	// header of each token request's JWT. This is required for Subject Name/Issuer (SNI) authentication.
	// Defaults to False.
	SendCertificateChain bool

	// tokenCachePersistenceOptions enables persistent token caching when not nil.
	tokenCachePersistenceOptions *tokenCachePersistenceOptions
}

// ClientCertificateCredential authenticates a service principal with a certificate.
type ClientCertificateCredential struct {
	// client wraps the MSAL confidential client that acquires tokens.
	client *confidentialClient
}
+
+// NewClientCertificateCredential constructs a ClientCertificateCredential. Pass nil for options to accept defaults. See
+// [ParseCertificates] for help loading a certificate.
+func NewClientCertificateCredential(tenantID string, clientID string, certs []*x509.Certificate, key crypto.PrivateKey, options *ClientCertificateCredentialOptions) (*ClientCertificateCredential, error) {
+ if len(certs) == 0 {
+ return nil, errors.New("at least one certificate is required")
+ }
+ if options == nil {
+ options = &ClientCertificateCredentialOptions{}
+ }
+ cred, err := confidential.NewCredFromCert(certs, key)
+ if err != nil {
+ return nil, err
+ }
+ msalOpts := confidentialClientOptions{
+ AdditionallyAllowedTenants: options.AdditionallyAllowedTenants,
+ ClientOptions: options.ClientOptions,
+ DisableInstanceDiscovery: options.DisableInstanceDiscovery,
+ SendX5C: options.SendCertificateChain,
+ tokenCachePersistenceOptions: options.tokenCachePersistenceOptions,
+ }
+ c, err := newConfidentialClient(tenantID, clientID, credNameCert, cred, msalOpts)
+ if err != nil {
+ return nil, err
+ }
+ return &ClientCertificateCredential{client: c}, nil
+}
+
// GetToken requests an access token from Microsoft Entra ID. This method is called automatically by Azure SDK clients.
func (c *ClientCertificateCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
	// err is declared first so the deferred closure records the call's final outcome in the span
	var err error
	ctx, endSpan := runtime.StartSpan(ctx, credNameCert+"."+traceOpGetToken, c.client.azClient.Tracer(), nil)
	defer func() { endSpan(err) }()
	tk, err := c.client.GetToken(ctx, opts)
	return tk, err
}
+
// ParseCertificates loads certificates and a private key, in PEM or PKCS#12 format, for use with [NewClientCertificateCredential].
// Pass nil for password if the private key isn't encrypted. This function has limitations, for example it can't decrypt keys in
// PEM format or PKCS#12 certificates that use SHA256 for message authentication. If you encounter such limitations, consider
// using another module to load the certificate and private key.
func ParseCertificates(certData []byte, password []byte) ([]*x509.Certificate, crypto.PrivateKey, error) {
	var blocks []*pem.Block
	var err error
	// try PEM first only when there's no password (encrypted PEM isn't supported);
	// fall back to PKCS#12 when PEM decoding finds nothing
	if len(password) == 0 {
		blocks, err = loadPEMCert(certData)
	}
	if len(blocks) == 0 || err != nil {
		blocks, err = loadPKCS12Cert(certData, string(password))
	}
	if err != nil {
		return nil, nil, err
	}
	var certs []*x509.Certificate
	var pk crypto.PrivateKey
	for _, block := range blocks {
		switch block.Type {
		case "CERTIFICATE":
			c, err := x509.ParseCertificate(block.Bytes)
			if err != nil {
				return nil, nil, err
			}
			certs = append(certs, c)
		case "PRIVATE KEY":
			if pk != nil {
				return nil, nil, errors.New("certData contains multiple private keys")
			}
			// PKCS#8 is the usual encoding for this block type; fall back to PKCS#1
			pk, err = x509.ParsePKCS8PrivateKey(block.Bytes)
			if err != nil {
				pk, err = x509.ParsePKCS1PrivateKey(block.Bytes)
			}
			if err != nil {
				return nil, nil, err
			}
		case "RSA PRIVATE KEY":
			if pk != nil {
				return nil, nil, errors.New("certData contains multiple private keys")
			}
			pk, err = x509.ParsePKCS1PrivateKey(block.Bytes)
			if err != nil {
				return nil, nil, err
			}
		}
		// other block types are silently ignored
	}
	if len(certs) == 0 {
		return nil, nil, errors.New("found no certificate")
	}
	if pk == nil {
		return nil, nil, errors.New("found no private key")
	}
	return certs, pk, nil
}
+
// loadPEMCert decodes every PEM block in certData, returning an error when none are found.
func loadPEMCert(certData []byte) ([]*pem.Block, error) {
	var blocks []*pem.Block
	rest := certData
	for {
		block, remainder := pem.Decode(rest)
		if block == nil {
			break
		}
		blocks = append(blocks, block)
		rest = remainder
	}
	if len(blocks) == 0 {
		return nil, errors.New("didn't find any PEM blocks")
	}
	return blocks, nil
}
+
// loadPKCS12Cert converts PKCS#12 data to PEM blocks using the given password,
// returning an error when the data yields no blocks.
func loadPKCS12Cert(certData []byte, password string) ([]*pem.Block, error) {
	blocks, err := pkcs12.ToPEM(certData, password)
	if err != nil {
		return nil, err
	}
	if len(blocks) == 0 {
		// not mentioning PKCS12 in this message because we end up here when certData is garbage
		return nil, errors.New("didn't find any certificate content")
	}
	return blocks, err
}

var _ azcore.TokenCredential = (*ClientCertificateCredential)(nil)
@@ -0,0 +1,75 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential"
+)
+
// credNameSecret is used in error and trace messages for this credential type.
const credNameSecret = "ClientSecretCredential"

// ClientSecretCredentialOptions contains optional parameters for ClientSecretCredential.
type ClientSecretCredentialOptions struct {
	azcore.ClientOptions

	// AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire tokens.
	// Add the wildcard value "*" to allow the credential to acquire tokens for any tenant in which the
	// application is registered.
	AdditionallyAllowedTenants []string

	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
	// the application responsible for ensuring the configured authority is valid and trustworthy.
	DisableInstanceDiscovery bool

	// tokenCachePersistenceOptions enables persistent token caching when not nil.
	tokenCachePersistenceOptions *tokenCachePersistenceOptions
}

// ClientSecretCredential authenticates an application with a client secret.
type ClientSecretCredential struct {
	// client wraps the MSAL confidential client that acquires tokens.
	client *confidentialClient
}
+
+// NewClientSecretCredential constructs a ClientSecretCredential. Pass nil for options to accept defaults.
+func NewClientSecretCredential(tenantID string, clientID string, clientSecret string, options *ClientSecretCredentialOptions) (*ClientSecretCredential, error) {
+ if options == nil {
+ options = &ClientSecretCredentialOptions{}
+ }
+ cred, err := confidential.NewCredFromSecret(clientSecret)
+ if err != nil {
+ return nil, err
+ }
+ msalOpts := confidentialClientOptions{
+ AdditionallyAllowedTenants: options.AdditionallyAllowedTenants,
+ ClientOptions: options.ClientOptions,
+ DisableInstanceDiscovery: options.DisableInstanceDiscovery,
+ tokenCachePersistenceOptions: options.tokenCachePersistenceOptions,
+ }
+ c, err := newConfidentialClient(tenantID, clientID, credNameSecret, cred, msalOpts)
+ if err != nil {
+ return nil, err
+ }
+ return &ClientSecretCredential{client: c}, nil
+}
+
// GetToken requests an access token from Microsoft Entra ID. This method is called automatically by Azure SDK clients.
func (c *ClientSecretCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
	// err is declared first so the deferred closure records the call's final outcome in the span
	var err error
	ctx, endSpan := runtime.StartSpan(ctx, credNameSecret+"."+traceOpGetToken, c.client.azClient.Tracer(), nil)
	defer func() { endSpan(err) }()
	tk, err := c.client.GetToken(ctx, opts)
	return tk, err
}

var _ azcore.TokenCredential = (*ClientSecretCredential)(nil)
@@ -0,0 +1,184 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "os"
+ "strings"
+ "sync"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity/internal"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential"
+)
+
// confidentialClientOptions aggregates the settings shared by the credential types
// that authenticate through a confidentialClient.
type confidentialClientOptions struct {
	azcore.ClientOptions

	AdditionallyAllowedTenants []string
	// Assertion for on-behalf-of authentication
	Assertion                         string
	DisableInstanceDiscovery, SendX5C bool
	tokenCachePersistenceOptions      *tokenCachePersistenceOptions
}

// confidentialClient wraps the MSAL confidential client
type confidentialClient struct {
	// cae and noCAE are separate MSAL clients for continuous-access-evaluation and
	// non-CAE tokens; each is lazily created and guarded by its own mutex, while
	// clientMu guards the lazy creation itself.
	cae, noCAE               msalConfidentialClient
	caeMu, noCAEMu, clientMu *sync.Mutex
	clientID, tenantID       string
	cred                     confidential.Credential
	host                     string
	name                     string
	opts                     confidentialClientOptions
	region                   string
	azClient                 *azcore.Client
}
+
+func newConfidentialClient(tenantID, clientID, name string, cred confidential.Credential, opts confidentialClientOptions) (*confidentialClient, error) {
+ if !validTenantID(tenantID) {
+ return nil, errInvalidTenantID
+ }
+ host, err := setAuthorityHost(opts.Cloud)
+ if err != nil {
+ return nil, err
+ }
+ client, err := azcore.NewClient(module, version, runtime.PipelineOptions{
+ Tracing: runtime.TracingOptions{
+ Namespace: traceNamespace,
+ },
+ }, &opts.ClientOptions)
+ if err != nil {
+ return nil, err
+ }
+ opts.AdditionallyAllowedTenants = resolveAdditionalTenants(opts.AdditionallyAllowedTenants)
+ return &confidentialClient{
+ caeMu: &sync.Mutex{},
+ clientID: clientID,
+ clientMu: &sync.Mutex{},
+ cred: cred,
+ host: host,
+ name: name,
+ noCAEMu: &sync.Mutex{},
+ opts: opts,
+ region: os.Getenv(azureRegionalAuthorityName),
+ tenantID: tenantID,
+ azClient: client,
+ }, nil
+}
+
+// GetToken requests an access token from MSAL, checking the cache first.
+func (c *confidentialClient) GetToken(ctx context.Context, tro policy.TokenRequestOptions) (azcore.AccessToken, error) {
+ if len(tro.Scopes) < 1 {
+ return azcore.AccessToken{}, fmt.Errorf("%s.GetToken() requires at least one scope", c.name)
+ }
+ // we don't resolve the tenant for managed identities because they acquire tokens only from their home tenants
+ if c.name != credNameManagedIdentity {
+ tenant, err := c.resolveTenant(tro.TenantID)
+ if err != nil {
+ return azcore.AccessToken{}, err
+ }
+ tro.TenantID = tenant
+ }
+ client, mu, err := c.client(tro)
+ if err != nil {
+ return azcore.AccessToken{}, err
+ }
+ mu.Lock()
+ defer mu.Unlock()
+ var ar confidential.AuthResult
+ if c.opts.Assertion != "" {
+ ar, err = client.AcquireTokenOnBehalfOf(ctx, c.opts.Assertion, tro.Scopes, confidential.WithClaims(tro.Claims), confidential.WithTenantID(tro.TenantID))
+ } else {
+ ar, err = client.AcquireTokenSilent(ctx, tro.Scopes, confidential.WithClaims(tro.Claims), confidential.WithTenantID(tro.TenantID))
+ if err != nil {
+ ar, err = client.AcquireTokenByCredential(ctx, tro.Scopes, confidential.WithClaims(tro.Claims), confidential.WithTenantID(tro.TenantID))
+ }
+ }
+ if err != nil {
+ // We could get a credentialUnavailableError from managed identity authentication because in that case the error comes from our code.
+ // We return it directly because it affects the behavior of credential chains. Otherwise, we return AuthenticationFailedError.
+ var unavailableErr credentialUnavailable
+ if !errors.As(err, &unavailableErr) {
+ res := getResponseFromError(err)
+ err = newAuthenticationFailedError(c.name, err.Error(), res, err)
+ }
+ } else {
+ msg := fmt.Sprintf("%s.GetToken() acquired a token for scope %q", c.name, strings.Join(ar.GrantedScopes, ", "))
+ log.Write(EventAuthentication, msg)
+ }
+ return azcore.AccessToken{Token: ar.AccessToken, ExpiresOn: ar.ExpiresOn.UTC()}, err
+}
+
+func (c *confidentialClient) client(tro policy.TokenRequestOptions) (msalConfidentialClient, *sync.Mutex, error) {
+ c.clientMu.Lock()
+ defer c.clientMu.Unlock()
+ if tro.EnableCAE {
+ if c.cae == nil {
+ client, err := c.newMSALClient(true)
+ if err != nil {
+ return nil, nil, err
+ }
+ c.cae = client
+ }
+ return c.cae, c.caeMu, nil
+ }
+ if c.noCAE == nil {
+ client, err := c.newMSALClient(false)
+ if err != nil {
+ return nil, nil, err
+ }
+ c.noCAE = client
+ }
+ return c.noCAE, c.noCAEMu, nil
+}
+
// newMSALClient constructs a confidential MSAL client for the configured authority,
// with or without continuous access evaluation (CAE) support.
func (c *confidentialClient) newMSALClient(enableCAE bool) (msalConfidentialClient, error) {
	// CAE and non-CAE tokens are cached separately
	cache, err := internal.NewCache(c.opts.tokenCachePersistenceOptions, enableCAE)
	if err != nil {
		return nil, err
	}
	authority := runtime.JoinPaths(c.host, c.tenantID)
	o := []confidential.Option{
		confidential.WithAzureRegion(c.region),
		confidential.WithCache(cache),
		// c itself satisfies MSAL's HTTPClient interface (see Do/CloseIdleConnections below)
		confidential.WithHTTPClient(c),
	}
	if enableCAE {
		o = append(o, confidential.WithClientCapabilities(cp1))
	}
	if c.opts.SendX5C {
		o = append(o, confidential.WithX5C())
	}
	// ADFS authorities don't support instance discovery
	if c.opts.DisableInstanceDiscovery || strings.ToLower(c.tenantID) == "adfs" {
		o = append(o, confidential.WithInstanceDiscovery(false))
	}
	return confidential.New(authority, c.clientID, c.cred, o...)
}
+
// resolveTenant returns the correct WithTenantID() argument for a token request given the client's
// configuration, or an error when that configuration doesn't allow the specified tenant
func (c *confidentialClient) resolveTenant(specified string) (string, error) {
	return resolveTenant(c.tenantID, specified, c.name, c.opts.AdditionallyAllowedTenants)
}

// these methods satisfy the MSAL ops.HTTPClient interface

// CloseIdleConnections is a no-op; connection lifetime is managed by the azcore pipeline.
func (c *confidentialClient) CloseIdleConnections() {
	// do nothing
}

// Do routes MSAL's HTTP requests through the credential's azcore pipeline.
func (c *confidentialClient) Do(r *http.Request) (*http.Response, error) {
	return doForClient(c.azClient, r)
}
@@ -0,0 +1,165 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "os"
+ "strings"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+)
+
+// DefaultAzureCredentialOptions contains optional parameters for DefaultAzureCredential.
+// These options may not apply to all credentials in the chain.
type DefaultAzureCredentialOptions struct {
+	// ClientOptions has additional options for credentials that use an Azure SDK HTTP pipeline. These options don't apply
+	// to credential types that authenticate via external tools such as the Azure CLI.
+	azcore.ClientOptions
+
+	// AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire tokens. Add
+	// the wildcard value "*" to allow the credential to acquire tokens for any tenant. This value can also be
+	// set as a semicolon delimited list of tenants in the environment variable AZURE_ADDITIONALLY_ALLOWED_TENANTS.
+	// An explicit value here takes precedence over the environment variable.
+	AdditionallyAllowedTenants []string
+	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
+	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
+	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
+	// the application responsible for ensuring the configured authority is valid and trustworthy.
+	DisableInstanceDiscovery bool
+	// TenantID sets the default tenant for authentication via the Azure CLI and workload identity.
+	TenantID string
+}
+
+// DefaultAzureCredential is a default credential chain for applications that will deploy to Azure.
+// It combines credentials suitable for deployment with credentials suitable for local development.
+// It attempts to authenticate with each of these credential types, in the following order, stopping
+// when one provides a token:
+//
+//   - [EnvironmentCredential]
+//   - [WorkloadIdentityCredential], if environment variable configuration is set by the Azure workload
+//     identity webhook. Use [WorkloadIdentityCredential] directly when not using the webhook or needing
+//     more control over its configuration.
+//   - [ManagedIdentityCredential]
+//   - [AzureCLICredential]
+//   - [AzureDeveloperCLICredential]
+//
+// Consult the documentation for these credential types for more information on how they authenticate.
+// Once a credential has successfully authenticated, DefaultAzureCredential will use that credential for
+// every subsequent authentication.
+type DefaultAzureCredential struct {
+	// chain holds the ordered credentials; its GetToken implements the fallback behavior.
+	chain *ChainedTokenCredential
+}
+
+// NewDefaultAzureCredential creates a DefaultAzureCredential. Pass nil for options to accept defaults.
+// Constructor failures for individual credentials don't fail this call; each failed credential is
+// replaced in the chain by a placeholder that reports the failure from GetToken, and the failures
+// are also logged.
+func NewDefaultAzureCredential(options *DefaultAzureCredentialOptions) (*DefaultAzureCredential, error) {
+	var creds []azcore.TokenCredential
+	var errorMessages []string
+
+	if options == nil {
+		options = &DefaultAzureCredentialOptions{}
+	}
+	// explicit option wins; otherwise read the semicolon-delimited env var
+	additionalTenants := options.AdditionallyAllowedTenants
+	if len(additionalTenants) == 0 {
+		if tenants := os.Getenv(azureAdditionallyAllowedTenants); tenants != "" {
+			additionalTenants = strings.Split(tenants, ";")
+		}
+	}
+
+	envCred, err := NewEnvironmentCredential(&EnvironmentCredentialOptions{
+		ClientOptions:              options.ClientOptions,
+		DisableInstanceDiscovery:   options.DisableInstanceDiscovery,
+		additionallyAllowedTenants: additionalTenants,
+	})
+	if err == nil {
+		creds = append(creds, envCred)
+	} else {
+		// keep the chain's length and order stable: substitute a reporter whose
+		// GetToken surfaces this construction error
+		errorMessages = append(errorMessages, "EnvironmentCredential: "+err.Error())
+		creds = append(creds, &defaultCredentialErrorReporter{credType: "EnvironmentCredential", err: err})
+	}
+
+	wic, err := NewWorkloadIdentityCredential(&WorkloadIdentityCredentialOptions{
+		AdditionallyAllowedTenants: additionalTenants,
+		ClientOptions:              options.ClientOptions,
+		DisableInstanceDiscovery:   options.DisableInstanceDiscovery,
+		TenantID:                   options.TenantID,
+	})
+	if err == nil {
+		creds = append(creds, wic)
+	} else {
+		errorMessages = append(errorMessages, credNameWorkloadIdentity+": "+err.Error())
+		creds = append(creds, &defaultCredentialErrorReporter{credType: credNameWorkloadIdentity, err: err})
+	}
+
+	// dac: true marks the managed identity credential as part of this chain;
+	// AZURE_CLIENT_ID selects a user-assigned identity when set
+	o := &ManagedIdentityCredentialOptions{ClientOptions: options.ClientOptions, dac: true}
+	if ID, ok := os.LookupEnv(azureClientID); ok {
+		o.ID = ClientID(ID)
+	}
+	miCred, err := NewManagedIdentityCredential(o)
+	if err == nil {
+		creds = append(creds, miCred)
+	} else {
+		errorMessages = append(errorMessages, credNameManagedIdentity+": "+err.Error())
+		creds = append(creds, &defaultCredentialErrorReporter{credType: credNameManagedIdentity, err: err})
+	}
+
+	cliCred, err := NewAzureCLICredential(&AzureCLICredentialOptions{AdditionallyAllowedTenants: additionalTenants, TenantID: options.TenantID})
+	if err == nil {
+		creds = append(creds, cliCred)
+	} else {
+		errorMessages = append(errorMessages, credNameAzureCLI+": "+err.Error())
+		creds = append(creds, &defaultCredentialErrorReporter{credType: credNameAzureCLI, err: err})
+	}
+
+	azdCred, err := NewAzureDeveloperCLICredential(&AzureDeveloperCLICredentialOptions{
+		AdditionallyAllowedTenants: additionalTenants,
+		TenantID:                   options.TenantID,
+	})
+	if err == nil {
+		creds = append(creds, azdCred)
+	} else {
+		errorMessages = append(errorMessages, credNameAzureDeveloperCLI+": "+err.Error())
+		creds = append(creds, &defaultCredentialErrorReporter{credType: credNameAzureDeveloperCLI, err: err})
+	}
+
+	// log (don't fail on) constructor errors; the chain can still succeed
+	if len(errorMessages) > 0 {
+		log.Writef(EventAuthentication, "NewDefaultAzureCredential failed to initialize some credentials:\n\t%s", strings.Join(errorMessages, "\n\t"))
+	}
+
+	chain, err := NewChainedTokenCredential(creds, nil)
+	if err != nil {
+		return nil, err
+	}
+	chain.name = "DefaultAzureCredential"
+	return &DefaultAzureCredential{chain: chain}, nil
+}
+
+// GetToken requests an access token from Microsoft Entra ID. This method is called automatically by Azure SDK clients.
+// It simply delegates to the underlying chain, which remembers the first credential that succeeds.
+func (c *DefaultAzureCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	return c.chain.GetToken(ctx, opts)
+}
+
+// compile-time check that DefaultAzureCredential implements azcore.TokenCredential
+var _ azcore.TokenCredential = (*DefaultAzureCredential)(nil)
+
+// defaultCredentialErrorReporter is a substitute for credentials that couldn't be constructed.
+// Its GetToken method always returns a credentialUnavailableError having the same message as
+// the error that prevented constructing the credential. This ensures the message is present
+// in the error returned by ChainedTokenCredential.GetToken()
+type defaultCredentialErrorReporter struct {
+	// credType names the credential that failed to construct (used as the error prefix)
+	credType string
+	// err is the original construction error
+	err error
+}
+
+// GetToken always fails, reporting the stored construction error. The error is
+// normalized to credentialUnavailable so the chain moves on to the next credential.
+func (d *defaultCredentialErrorReporter) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	// already a credentialUnavailable error: return it unchanged to preserve its exact message
+	if _, ok := d.err.(credentialUnavailable); ok {
+		return azcore.AccessToken{}, d.err
+	}
+	return azcore.AccessToken{}, newCredentialUnavailableError(d.credType, d.err.Error())
+}
+
+// compile-time check that defaultCredentialErrorReporter implements azcore.TokenCredential
+var _ azcore.TokenCredential = (*defaultCredentialErrorReporter)(nil)
@@ -0,0 +1,38 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "errors"
+ "time"
+)
+
+// cliTimeout is the default timeout for authentication attempts via CLI tools
+// (presumably applied by the CLI-based credentials defined elsewhere in this package — not used in this file).
+const cliTimeout = 10 * time.Second
+
+// unavailableIfInChain returns err or, if the credential was invoked by DefaultAzureCredential, a
+// credentialUnavailableError having the same message. This ensures DefaultAzureCredential will try
+// the next credential in its chain (another developer credential).
+//
+// NOTE(review): the wrapped error is always attributed to credNameAzureDeveloperCLI regardless of
+// which credential produced it — confirm all callers are azd-related.
+func unavailableIfInChain(err error, inDefaultChain bool) error {
+	if err != nil && inDefaultChain {
+		var unavailableErr credentialUnavailable
+		// don't double-wrap errors that already signal unavailability
+		if !errors.As(err, &unavailableErr) {
+			err = newCredentialUnavailableError(credNameAzureDeveloperCLI, err.Error())
+		}
+	}
+	return err
+}
+
+// validScope is for credentials authenticating via external tools. The authority validates scopes for all other credentials.
+// A scope is valid when every character is alphanumeric or one of . - _ / :
+func validScope(scope string) bool {
+	for _, ch := range scope {
+		switch {
+		case alphanumeric(ch), ch == '.', ch == '-', ch == '_', ch == '/', ch == ':':
+			// permitted character — keep scanning
+		default:
+			return false
+		}
+	}
+	return true
+}
@@ -0,0 +1,138 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "fmt"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+)
+
+// credNameDeviceCode is the credential name used in error messages and trace span names.
+const credNameDeviceCode = "DeviceCodeCredential"
+
+// DeviceCodeCredentialOptions contains optional parameters for DeviceCodeCredential.
+// Zero-valued fields receive defaults in init(); see that method for the default values.
+type DeviceCodeCredentialOptions struct {
+	azcore.ClientOptions
+
+	// AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire
+	// tokens. Add the wildcard value "*" to allow the credential to acquire tokens for any tenant.
+	AdditionallyAllowedTenants []string
+
+	// authenticationRecord returned by a call to a credential's Authenticate method. Set this option
+	// to enable the credential to use data from a previous authentication.
+	authenticationRecord authenticationRecord
+
+	// ClientID is the ID of the application users will authenticate to.
+	// Defaults to the ID of an Azure development application.
+	ClientID string
+
+	// disableAutomaticAuthentication prevents the credential from automatically prompting the user to authenticate.
+	// When this option is true, GetToken will return authenticationRequiredError when user interaction is necessary
+	// to acquire a token.
+	disableAutomaticAuthentication bool
+
+	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
+	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
+	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
+	// the application responsible for ensuring the configured authority is valid and trustworthy.
+	DisableInstanceDiscovery bool
+
+	// TenantID is the Microsoft Entra tenant the credential authenticates in. Defaults to the
+	// "organizations" tenant, which can authenticate work and school accounts. Required for single-tenant
+	// applications.
+	TenantID string
+
+	// tokenCachePersistenceOptions enables persistent token caching when not nil.
+	tokenCachePersistenceOptions *tokenCachePersistenceOptions
+
+	// UserPrompt controls how the credential presents authentication instructions. The credential calls
+	// this function with authentication details when it receives a device code. By default, the credential
+	// prints these details to stdout.
+	UserPrompt func(context.Context, DeviceCodeMessage) error
+}
+
+// init fills in defaults for unset fields: the "organizations" tenant, the Azure
+// development application's client ID, and a prompt that prints the device code
+// message to stdout. The three checks are independent of one another.
+func (o *DeviceCodeCredentialOptions) init() {
+	if o.UserPrompt == nil {
+		o.UserPrompt = func(_ context.Context, msg DeviceCodeMessage) error {
+			fmt.Println(msg.Message)
+			return nil
+		}
+	}
+	if o.ClientID == "" {
+		o.ClientID = developerSignOnClientID
+	}
+	if o.TenantID == "" {
+		o.TenantID = organizationsTenantID
+	}
+}
+
+// DeviceCodeMessage contains the information a user needs to complete authentication.
+// It is passed to DeviceCodeCredentialOptions.UserPrompt when a device code is issued.
+type DeviceCodeMessage struct {
+	// UserCode is the user code returned by the service.
+	UserCode string `json:"user_code"`
+	// VerificationURL is the URL at which the user must authenticate.
+	VerificationURL string `json:"verification_uri"`
+	// Message is user instruction from Microsoft Entra ID.
+	Message string `json:"message"`
+}
+
+// DeviceCodeCredential acquires tokens for a user via the device code flow, which has the
+// user browse to a Microsoft Entra URL, enter a code, and authenticate. It's useful
+// for authenticating a user in an environment without a web browser, such as an SSH session.
+// If a web browser is available, [InteractiveBrowserCredential] is more convenient because it
+// automatically opens a browser to the login page.
+type DeviceCodeCredential struct {
+	// client handles MSAL interaction, caching, and tracing for this credential.
+	client *publicClient
+}
+
+// NewDeviceCodeCredential creates a DeviceCodeCredential. Pass nil to accept default options.
+// The options are copied so the caller's struct isn't mutated when defaults are applied.
+func NewDeviceCodeCredential(options *DeviceCodeCredentialOptions) (*DeviceCodeCredential, error) {
+	cp := DeviceCodeCredentialOptions{}
+	if options != nil {
+		cp = *options
+	}
+	// fill in defaults (tenant, client ID, stdout prompt) on the copy
+	cp.init()
+	msalOpts := publicClientOptions{
+		AdditionallyAllowedTenants:     cp.AdditionallyAllowedTenants,
+		ClientOptions:                  cp.ClientOptions,
+		DeviceCodePrompt:               cp.UserPrompt,
+		DisableAutomaticAuthentication: cp.disableAutomaticAuthentication,
+		DisableInstanceDiscovery:       cp.DisableInstanceDiscovery,
+		Record:                         cp.authenticationRecord,
+		TokenCachePersistenceOptions:   cp.tokenCachePersistenceOptions,
+	}
+	c, err := newPublicClient(cp.TenantID, cp.ClientID, credNameDeviceCode, msalOpts)
+	if err != nil {
+		return nil, err
+	}
+	c.name = credNameDeviceCode
+	return &DeviceCodeCredential{client: c}, nil
+}
+
+// Authenticate a user via the device code flow. Subsequent calls to GetToken will automatically use the returned AuthenticationRecord.
+// err is declared up front so the deferred endSpan closure observes the final error value.
+func (c *DeviceCodeCredential) authenticate(ctx context.Context, opts *policy.TokenRequestOptions) (authenticationRecord, error) {
+	var err error
+	ctx, endSpan := runtime.StartSpan(ctx, credNameDeviceCode+"."+traceOpAuthenticate, c.client.azClient.Tracer(), nil)
+	defer func() { endSpan(err) }()
+	tk, err := c.client.Authenticate(ctx, opts)
+	return tk, err
+}
+
+// GetToken requests an access token from Microsoft Entra ID. It will begin the device code flow and poll until the user completes authentication.
+// This method is called automatically by Azure SDK clients.
+// err is declared up front so the deferred endSpan closure observes the final error value.
+func (c *DeviceCodeCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	var err error
+	ctx, endSpan := runtime.StartSpan(ctx, credNameDeviceCode+"."+traceOpGetToken, c.client.azClient.Tracer(), nil)
+	defer func() { endSpan(err) }()
+	tk, err := c.client.GetToken(ctx, opts)
+	return tk, err
+}
+
+// compile-time check that DeviceCodeCredential implements azcore.TokenCredential
+var _ azcore.TokenCredential = (*DeviceCodeCredential)(nil)
@@ -0,0 +1,167 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+)
+
+// envVarSendCertChain opts into sending the certificate chain (x5c) for subject name/issuer authentication.
+const envVarSendCertChain = "AZURE_CLIENT_SEND_CERTIFICATE_CHAIN"
+
+// EnvironmentCredentialOptions contains optional parameters for EnvironmentCredential.
+type EnvironmentCredentialOptions struct {
+	azcore.ClientOptions
+
+	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
+	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
+	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
+	// the application responsible for ensuring the configured authority is valid and trustworthy.
+	DisableInstanceDiscovery bool
+	// additionallyAllowedTenants is used only by NewDefaultAzureCredential() to enable that constructor's explicit
+	// option to override the value of AZURE_ADDITIONALLY_ALLOWED_TENANTS. Applications using EnvironmentCredential
+	// directly should set that variable instead. This field should remain unexported to preserve this credential's
+	// unambiguous "all configuration from environment variables" design.
+	additionallyAllowedTenants []string
+}
+
+// EnvironmentCredential authenticates a service principal with a secret or certificate, or a user with a password, depending
+// on environment variable configuration. It reads configuration from these variables, in the following order:
+//
+// # Service principal with client secret
+//
+// AZURE_TENANT_ID: ID of the service principal's tenant. Also called its "directory" ID.
+//
+// AZURE_CLIENT_ID: the service principal's client ID
+//
+// AZURE_CLIENT_SECRET: one of the service principal's client secrets
+//
+// # Service principal with certificate
+//
+// AZURE_TENANT_ID: ID of the service principal's tenant. Also called its "directory" ID.
+//
+// AZURE_CLIENT_ID: the service principal's client ID
+//
+// AZURE_CLIENT_CERTIFICATE_PATH: path to a PEM or PKCS12 certificate file including the private key.
+//
+// AZURE_CLIENT_CERTIFICATE_PASSWORD: (optional) password for the certificate file.
+//
+// Note that this credential uses [ParseCertificates] to load the certificate and key from the file. If this
+// function isn't able to parse your certificate, use [ClientCertificateCredential] instead.
+//
+// # User with username and password
+//
+// AZURE_TENANT_ID: (optional) tenant to authenticate in. Defaults to "organizations".
+//
+// AZURE_CLIENT_ID: client ID of the application the user will authenticate to
+//
+// AZURE_USERNAME: a username (usually an email address)
+//
+// AZURE_PASSWORD: the user's password
+//
+// # Configuration for multitenant applications
+//
+// To enable multitenant authentication, set AZURE_ADDITIONALLY_ALLOWED_TENANTS with a semicolon delimited list of tenants
+// the credential may request tokens from in addition to the tenant specified by AZURE_TENANT_ID. Set
+// AZURE_ADDITIONALLY_ALLOWED_TENANTS to "*" to enable the credential to request a token from any tenant.
+type EnvironmentCredential struct {
+	// cred is the concrete credential selected by NewEnvironmentCredential from environment configuration.
+	cred azcore.TokenCredential
+}
+
+// NewEnvironmentCredential creates an EnvironmentCredential. Pass nil to accept default options.
+// It selects a concrete credential from environment variables, trying in order: client secret,
+// client certificate, then username/password. AZURE_TENANT_ID and AZURE_CLIENT_ID are always required.
+func NewEnvironmentCredential(options *EnvironmentCredentialOptions) (*EnvironmentCredential, error) {
+	if options == nil {
+		options = &EnvironmentCredentialOptions{}
+	}
+	tenantID := os.Getenv(azureTenantID)
+	if tenantID == "" {
+		// build the message from the shared constant, matching the client ID check below
+		return nil, errors.New("missing environment variable " + azureTenantID)
+	}
+	clientID := os.Getenv(azureClientID)
+	if clientID == "" {
+		return nil, errors.New("missing environment variable " + azureClientID)
+	}
+	// tenants set by NewDefaultAzureCredential() override the value of AZURE_ADDITIONALLY_ALLOWED_TENANTS
+	additionalTenants := options.additionallyAllowedTenants
+	if len(additionalTenants) == 0 {
+		if tenants := os.Getenv(azureAdditionallyAllowedTenants); tenants != "" {
+			additionalTenants = strings.Split(tenants, ";")
+		}
+	}
+	// 1st preference: service principal with client secret
+	if clientSecret := os.Getenv(azureClientSecret); clientSecret != "" {
+		log.Write(EventAuthentication, "EnvironmentCredential will authenticate with ClientSecretCredential")
+		o := &ClientSecretCredentialOptions{
+			AdditionallyAllowedTenants: additionalTenants,
+			ClientOptions:              options.ClientOptions,
+			DisableInstanceDiscovery:   options.DisableInstanceDiscovery,
+		}
+		cred, err := NewClientSecretCredential(tenantID, clientID, clientSecret, o)
+		if err != nil {
+			return nil, err
+		}
+		return &EnvironmentCredential{cred: cred}, nil
+	}
+	// 2nd preference: service principal with certificate
+	if certPath := os.Getenv(azureClientCertificatePath); certPath != "" {
+		log.Write(EventAuthentication, "EnvironmentCredential will authenticate with ClientCertificateCredential")
+		certData, err := os.ReadFile(certPath)
+		if err != nil {
+			return nil, fmt.Errorf(`failed to read certificate file "%s": %v`, certPath, err)
+		}
+		var password []byte
+		if v := os.Getenv(azureClientCertificatePassword); v != "" {
+			password = []byte(v)
+		}
+		certs, key, err := ParseCertificates(certData, password)
+		if err != nil {
+			return nil, fmt.Errorf("failed to parse %q due to error %q. This may be due to a limitation of this module's certificate loader. Consider calling NewClientCertificateCredential instead", certPath, err.Error())
+		}
+		o := &ClientCertificateCredentialOptions{
+			AdditionallyAllowedTenants: additionalTenants,
+			ClientOptions:              options.ClientOptions,
+			DisableInstanceDiscovery:   options.DisableInstanceDiscovery,
+		}
+		// opt-in: send the full certificate chain (x5c) when the env var is "1" or "true"
+		if v, ok := os.LookupEnv(envVarSendCertChain); ok {
+			o.SendCertificateChain = v == "1" || strings.ToLower(v) == "true"
+		}
+		cred, err := NewClientCertificateCredential(tenantID, clientID, certs, key, o)
+		if err != nil {
+			return nil, err
+		}
+		return &EnvironmentCredential{cred: cred}, nil
+	}
+	// 3rd preference: user with username and password
+	if username := os.Getenv(azureUsername); username != "" {
+		if password := os.Getenv(azurePassword); password != "" {
+			log.Write(EventAuthentication, "EnvironmentCredential will authenticate with UsernamePasswordCredential")
+			o := &UsernamePasswordCredentialOptions{
+				AdditionallyAllowedTenants: additionalTenants,
+				ClientOptions:              options.ClientOptions,
+				DisableInstanceDiscovery:   options.DisableInstanceDiscovery,
+			}
+			cred, err := NewUsernamePasswordCredential(tenantID, clientID, username, password, o)
+			if err != nil {
+				return nil, err
+			}
+			return &EnvironmentCredential{cred: cred}, nil
+		}
+		// a username without a password is an incomplete configuration
+		return nil, errors.New("no value for AZURE_PASSWORD")
+	}
+	return nil, errors.New("incomplete environment variable configuration. Only AZURE_TENANT_ID and AZURE_CLIENT_ID are set")
+}
+
+// GetToken requests an access token from Microsoft Entra ID. This method is called automatically by Azure SDK clients.
+// It delegates to whichever concrete credential the constructor selected from the environment.
+func (c *EnvironmentCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	return c.cred.GetToken(ctx, opts)
+}
+
+// compile-time check that EnvironmentCredential implements azcore.TokenCredential
+var _ azcore.TokenCredential = (*EnvironmentCredential)(nil)
@@ -0,0 +1,170 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/errorinfo"
+ msal "github.com/AzureAD/microsoft-authentication-library-for-go/apps/errors"
+)
+
+// getResponseFromError retrieves the HTTP response carried by an
+// AuthenticationFailedError or an MSAL CallErr, if either is in err's chain.
+// MSAL's CallErr is checked first, mirroring the original precedence.
+func getResponseFromError(err error) *http.Response {
+	var callErr msal.CallErr
+	if errors.As(err, &callErr) {
+		return callErr.Resp
+	}
+	var authFailedErr *AuthenticationFailedError
+	if errors.As(err, &authFailedErr) {
+		return authFailedErr.RawResponse
+	}
+	return nil
+}
+
+// AuthenticationFailedError indicates an authentication request has failed.
+type AuthenticationFailedError struct {
+	// RawResponse is the HTTP response motivating the error, if available.
+	RawResponse *http.Response
+
+	// credType names the credential that failed (used in Error() and to pick a troubleshooting link)
+	credType string
+	// message is the human-readable failure description
+	message string
+	// err is the underlying cause, if any
+	err error
+}
+
+// newAuthenticationFailedError constructs an AuthenticationFailedError from its parts.
+func newAuthenticationFailedError(credType string, message string, resp *http.Response, err error) error {
+	return &AuthenticationFailedError{credType: credType, message: message, RawResponse: resp, err: err}
+}
+
+// Error implements the error interface. Note that the message contents are not contractual and can change over time.
+// When an HTTP response is available the message includes the request line, the response status,
+// the (pretty-printed, when possible) response body, and a credential-specific troubleshooting link.
+func (e *AuthenticationFailedError) Error() string {
+	// without a response there's nothing to format beyond the basic message
+	if e.RawResponse == nil {
+		return e.credType + ": " + e.message
+	}
+	msg := &bytes.Buffer{}
+	fmt.Fprintf(msg, "%s authentication failed. %s\n", e.credType, e.message)
+	if e.RawResponse.Request != nil {
+		fmt.Fprintf(msg, "%s %s://%s%s\n", e.RawResponse.Request.Method, e.RawResponse.Request.URL.Scheme, e.RawResponse.Request.URL.Host, e.RawResponse.Request.URL.Path)
+	} else {
+		// this happens when the response is created from a custom HTTP transporter,
+		// which doesn't guarantee to bind the original request to the response
+		fmt.Fprintln(msg, "Request information not available")
+	}
+	fmt.Fprintln(msg, "--------------------------------------------------------------------------------")
+	fmt.Fprintf(msg, "RESPONSE %s\n", e.RawResponse.Status)
+	fmt.Fprintln(msg, "--------------------------------------------------------------------------------")
+	body, err := runtime.Payload(e.RawResponse)
+	switch {
+	case err != nil:
+		fmt.Fprintf(msg, "Error reading response body: %v", err)
+	case len(body) > 0:
+		if err := json.Indent(msg, body, "", "  "); err != nil {
+			// failed to pretty-print so just dump it verbatim
+			fmt.Fprint(msg, string(body))
+		}
+	default:
+		fmt.Fprint(msg, "Response contained no body")
+	}
+	fmt.Fprintln(msg, "\n--------------------------------------------------------------------------------")
+	// map the credential to its troubleshooting-guide anchor, when one exists
+	var anchor string
+	switch e.credType {
+	case credNameAzureCLI:
+		anchor = "azure-cli"
+	case credNameAzureDeveloperCLI:
+		anchor = "azd"
+	case credNameAzurePipelines:
+		anchor = "apc"
+	case credNameCert:
+		anchor = "client-cert"
+	case credNameSecret:
+		anchor = "client-secret"
+	case credNameManagedIdentity:
+		anchor = "managed-id"
+	case credNameUserPassword:
+		anchor = "username-password"
+	case credNameWorkloadIdentity:
+		anchor = "workload"
+	}
+	if anchor != "" {
+		fmt.Fprintf(msg, "To troubleshoot, visit https://aka.ms/azsdk/go/identity/troubleshoot#%s", anchor)
+	}
+	return msg.String()
+}
+
+// NonRetriable indicates the request which provoked this error shouldn't be retried.
+func (*AuthenticationFailedError) NonRetriable() {
+	// marker method
+}
+
+// compile-time check that AuthenticationFailedError satisfies errorinfo.NonRetriable
+var _ errorinfo.NonRetriable = (*AuthenticationFailedError)(nil)
+
+// authenticationRequiredError indicates a credential's Authenticate method must be called to acquire a token
+// because the credential requires user interaction and is configured not to request it automatically.
+// Embedding credentialUnavailableError makes it satisfy the credentialUnavailable interface.
+type authenticationRequiredError struct {
+	credentialUnavailableError
+
+	// TokenRequestOptions for the required token. Pass this to the credential's Authenticate method.
+	TokenRequestOptions policy.TokenRequestOptions
+}
+
+// newauthenticationRequiredError constructs an authenticationRequiredError carrying the
+// token request options needed to complete authentication interactively.
+func newauthenticationRequiredError(credType string, tro policy.TokenRequestOptions) error {
+	return &authenticationRequiredError{
+		credentialUnavailableError: credentialUnavailableError{
+			credType + " can't acquire a token without user interaction. Call Authenticate to authenticate a user interactively",
+		},
+		TokenRequestOptions: tro,
+	}
+}
+
+// compile-time interface checks
+var (
+	_ credentialUnavailable  = (*authenticationRequiredError)(nil)
+	_ errorinfo.NonRetriable = (*authenticationRequiredError)(nil)
+)
+
+// credentialUnavailable identifies errors meaning a credential can't attempt authentication;
+// ChainedTokenCredential moves to its next credential on receiving one.
+type credentialUnavailable interface {
+	error
+	credentialUnavailable()
+}
+
+// credentialUnavailableError is the standard implementation of credentialUnavailable.
+type credentialUnavailableError struct {
+	// message is the complete error text returned by Error()
+	message string
+}
+
+// newCredentialUnavailableError is an internal helper that ensures consistent error message
+// formatting: the credential name, a colon, then the message.
+func newCredentialUnavailableError(credType, message string) error {
+	return &credentialUnavailableError{message: credType + ": " + message}
+}
+
+// NewCredentialUnavailableError constructs an error indicating a credential can't attempt authentication
+// because it lacks required data or state. When [ChainedTokenCredential] receives this error it will try
+// its next credential, if any.
+func NewCredentialUnavailableError(message string) error {
+	return &credentialUnavailableError{message}
+}
+
+// Error implements the error interface. Note that the message contents are not contractual and can change over time.
+func (e *credentialUnavailableError) Error() string {
+	return e.message
+}
+
+// NonRetriable is a marker method indicating this error should not be retried. It has no implementation.
+func (*credentialUnavailableError) NonRetriable() {}
+
+// credentialUnavailable is the marker method satisfying the credentialUnavailable interface.
+func (*credentialUnavailableError) credentialUnavailable() {}
+
+// compile-time interface checks
+var (
+	_ credentialUnavailable  = (*credentialUnavailableError)(nil)
+	_ errorinfo.NonRetriable = (*credentialUnavailableError)(nil)
+)
@@ -0,0 +1,60 @@
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0-beta.1 h1:ODs3brnqQM99Tq1PffODpAViYv3Bf8zOg464MU7p5ew=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0-beta.1/go.mod h1:3Ug6Qzto9anB6mGlEdgYMDF5zHQ+wwhEaYR4s17PHMw=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0 h1:fb8kj/Dh4CSwgsOzHeZY4Xh68cFVbzXx+ONXGMY//4w=
+github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.0/go.mod h1:uReU2sSxZExRPBAg3qKzmAucSi51+SP1OhohieR821Q=
+github.com/Azure/azure-sdk-for-go/sdk/internal v1.3.0/go.mod h1:okt5dMMTOFjX/aovMlrjvvXoPMBVSPzk9185BT0+eZM=
+github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.2/go.mod h1:yInRyqWXAuaPrgI7p70+lDDgh3mlBohis29jGMISnmc=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI=
+github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/keybase/dbus v0.0.0-20220506165403-5aa21ea2c23a/go.mod h1:YPNKjjE7Ubp9dTbnWvsP3HT+hYnY6TfXzubYTBeUxc8=
+github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/montanaflynn/stats v0.7.0/go.mod h1:etXPPgVO6n31NxCd9KQUMvCM+ve0ruNzt6R8Bnaayow=
+github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
+github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
+github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
+github.com/stretchr/objx v0.5.2/go.mod h1:FRsXN1f5AsAjCGJKqEizvkpNtU+EGNCLh3NxZ/8L+MA=
+github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
+github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
+golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
+golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
+golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
+golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
+golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc=
+golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
+golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
+golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
+golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
+golang.org/x/net v0.24.0/go.mod h1:2Q7sJY5mzlzWjKtYUEXSlBWCdyaioyXzRB2RtU8KVE8=
+golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
+golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
+golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
+golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
+golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
+golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
+golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
+golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
+golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
+golang.org/x/term v0.19.0/go.mod h1:2CuTdWZ7KHSQwUzKva0cbMg6q2DMI3Mmxp+gKJbskEk=
+golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
+golang.org/x/term v0.21.0/go.mod h1:ooXLefLobQVslOqselCNF4SxFAaoS6KujMbsGzSDmX0=
+golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
+golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
+golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
+gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
+gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
@@ -0,0 +1,118 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+)
+
+const credNameBrowser = "InteractiveBrowserCredential"
+
+// InteractiveBrowserCredentialOptions contains optional parameters for InteractiveBrowserCredential.
+type InteractiveBrowserCredentialOptions struct {
+ azcore.ClientOptions
+
+ // AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire
+ // tokens. Add the wildcard value "*" to allow the credential to acquire tokens for any tenant.
+ AdditionallyAllowedTenants []string
+
+ // authenticationRecord returned by a call to a credential's Authenticate method. Set this option
+ // to enable the credential to use data from a previous authentication.
+ authenticationRecord authenticationRecord
+
+ // ClientID is the ID of the application users will authenticate to.
+ // Defaults to the ID of an Azure development application.
+ ClientID string
+
+ // disableAutomaticAuthentication prevents the credential from automatically prompting the user to authenticate.
+ // When this option is true, GetToken will return authenticationRequiredError when user interaction is necessary
+ // to acquire a token.
+ disableAutomaticAuthentication bool
+
+ // DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
+ // private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
+ // from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
+ // the application responsible for ensuring the configured authority is valid and trustworthy.
+ DisableInstanceDiscovery bool
+
+ // LoginHint pre-populates the account prompt with a username. Users may choose to authenticate a different account.
+ LoginHint string
+
+ // RedirectURL is the URL Microsoft Entra ID will redirect to with the access token. This is required
+ // only when setting ClientID, and must match a redirect URI in the application's registration.
+ // Applications which have registered "http://localhost" as a redirect URI need not set this option.
+ RedirectURL string
+
+ // TenantID is the Microsoft Entra tenant the credential authenticates in. Defaults to the
+ // "organizations" tenant, which can authenticate work and school accounts.
+ TenantID string
+
+ // tokenCachePersistenceOptions enables persistent token caching when not nil.
+ tokenCachePersistenceOptions *tokenCachePersistenceOptions
+}
+
+func (o *InteractiveBrowserCredentialOptions) init() {
+ if o.TenantID == "" {
+ o.TenantID = organizationsTenantID
+ }
+ if o.ClientID == "" {
+ o.ClientID = developerSignOnClientID
+ }
+}
+
+// InteractiveBrowserCredential opens a browser to interactively authenticate a user.
+type InteractiveBrowserCredential struct {
+ client *publicClient
+}
+
+// NewInteractiveBrowserCredential constructs a new InteractiveBrowserCredential. Pass nil to accept default options.
+func NewInteractiveBrowserCredential(options *InteractiveBrowserCredentialOptions) (*InteractiveBrowserCredential, error) {
+ cp := InteractiveBrowserCredentialOptions{}
+ if options != nil {
+ cp = *options
+ }
+ cp.init()
+ msalOpts := publicClientOptions{
+ AdditionallyAllowedTenants: cp.AdditionallyAllowedTenants,
+ ClientOptions: cp.ClientOptions,
+ DisableAutomaticAuthentication: cp.disableAutomaticAuthentication,
+ DisableInstanceDiscovery: cp.DisableInstanceDiscovery,
+ LoginHint: cp.LoginHint,
+ Record: cp.authenticationRecord,
+ RedirectURL: cp.RedirectURL,
+ TokenCachePersistenceOptions: cp.tokenCachePersistenceOptions,
+ }
+ c, err := newPublicClient(cp.TenantID, cp.ClientID, credNameBrowser, msalOpts)
+ if err != nil {
+ return nil, err
+ }
+ return &InteractiveBrowserCredential{client: c}, nil
+}
+
+// Authenticate a user via the default browser. Subsequent calls to GetToken will automatically use the returned AuthenticationRecord.
+func (c *InteractiveBrowserCredential) authenticate(ctx context.Context, opts *policy.TokenRequestOptions) (authenticationRecord, error) {
+ var err error
+ ctx, endSpan := runtime.StartSpan(ctx, credNameBrowser+"."+traceOpAuthenticate, c.client.azClient.Tracer(), nil)
+ defer func() { endSpan(err) }()
+ tk, err := c.client.Authenticate(ctx, opts)
+ return tk, err
+}
+
+// GetToken requests an access token from Microsoft Entra ID. This method is called automatically by Azure SDK clients.
+func (c *InteractiveBrowserCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+ var err error
+ ctx, endSpan := runtime.StartSpan(ctx, credNameBrowser+"."+traceOpGetToken, c.client.azClient.Tracer(), nil)
+ defer func() { endSpan(err) }()
+ tk, err := c.client.GetToken(ctx, opts)
+ return tk, err
+}
+
+var _ azcore.TokenCredential = (*InteractiveBrowserCredential)(nil)
@@ -0,0 +1,18 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package internal
+
+// TokenCachePersistenceOptions contains options for persistent token caching
+type TokenCachePersistenceOptions struct {
+ // AllowUnencryptedStorage controls whether the cache should fall back to storing its data in plain text
+ // when encryption isn't possible. Setting this true doesn't disable encryption. The cache always attempts
+ // encryption before falling back to plaintext storage.
+ AllowUnencryptedStorage bool
+
+ // Name identifies the cache. Set this to isolate data from other applications.
+ Name string
+}
@@ -0,0 +1,31 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package internal
+
+import (
+ "errors"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/cache"
+)
+
+var errMissingImport = errors.New("import github.com/Azure/azure-sdk-for-go/sdk/azidentity/cache to enable persistent caching")
+
+// NewCache constructs a persistent token cache when "o" isn't nil. Applications that intend to
+// use a persistent cache must first import the cache module, which will replace this function
+// with a platform-specific implementation.
+var NewCache = func(o *TokenCachePersistenceOptions, enableCAE bool) (cache.ExportReplace, error) {
+ if o == nil {
+ return nil, nil
+ }
+ return nil, errMissingImport
+}
+
+// CacheFilePath returns the path to the cache file for the given name.
+// Defining it in this package makes it available to azidentity tests.
+var CacheFilePath = func(name string) (string, error) {
+ return "", errMissingImport
+}
@@ -0,0 +1,14 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+
+// EventAuthentication entries contain information about authentication.
+// This includes information like the names of environment variables
+// used when obtaining credentials and the type of credential used.
+const EventAuthentication log.Event = "Authentication"
@@ -0,0 +1,17 @@
+{
+ "include": [
+ {
+ "Agent": {
+ "msi_image": {
+ "ArmTemplateParameters": "@{deployResources = $true}",
+ "OSVmImage": "env:LINUXNEXTVMIMAGE",
+ "Pool": "env:LINUXPOOL"
+ }
+ },
+ "GoVersion": [
+ "1.22.1"
+ ],
+ "IDENTITY_IMDS_AVAILABLE": "1"
+ }
+ ]
+}
@@ -0,0 +1,501 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "os"
+ "path/filepath"
+ "runtime"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ azruntime "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/streaming"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential"
+)
+
+const (
+ arcIMDSEndpoint = "IMDS_ENDPOINT"
+ defaultIdentityClientID = "DEFAULT_IDENTITY_CLIENT_ID"
+ identityEndpoint = "IDENTITY_ENDPOINT"
+ identityHeader = "IDENTITY_HEADER"
+ identityServerThumbprint = "IDENTITY_SERVER_THUMBPRINT"
+ headerMetadata = "Metadata"
+ imdsEndpoint = "http://169.254.169.254/metadata/identity/oauth2/token"
+ miResID = "mi_res_id"
+ msiEndpoint = "MSI_ENDPOINT"
+ msiResID = "msi_res_id"
+ msiSecret = "MSI_SECRET"
+ imdsAPIVersion = "2018-02-01"
+ azureArcAPIVersion = "2019-08-15"
+ qpClientID = "client_id"
+ serviceFabricAPIVersion = "2019-07-01-preview"
+)
+
+var imdsProbeTimeout = time.Second
+
+type msiType int
+
+const (
+ msiTypeAppService msiType = iota
+ msiTypeAzureArc
+ msiTypeAzureML
+ msiTypeCloudShell
+ msiTypeIMDS
+ msiTypeServiceFabric
+)
+
+type managedIdentityClient struct {
+ azClient *azcore.Client
+ endpoint string
+ id ManagedIDKind
+ msiType msiType
+ probeIMDS bool
+}
+
+// arcKeyDirectory returns the directory expected to contain Azure Arc keys
+var arcKeyDirectory = func() (string, error) {
+ switch runtime.GOOS {
+ case "linux":
+ return "/var/opt/azcmagent/tokens", nil
+ case "windows":
+ pd := os.Getenv("ProgramData")
+ if pd == "" {
+ return "", errors.New("environment variable ProgramData has no value")
+ }
+ return filepath.Join(pd, "AzureConnectedMachineAgent", "Tokens"), nil
+ default:
+ return "", fmt.Errorf("unsupported OS %q", runtime.GOOS)
+ }
+}
+
+type wrappedNumber json.Number
+
+func (n *wrappedNumber) UnmarshalJSON(b []byte) error {
+ c := string(b)
+ if c == "\"\"" {
+ return nil
+ }
+ return json.Unmarshal(b, (*json.Number)(n))
+}
+
+// setIMDSRetryOptionDefaults sets zero-valued fields to default values appropriate for IMDS
+func setIMDSRetryOptionDefaults(o *policy.RetryOptions) {
+ if o.MaxRetries == 0 {
+ o.MaxRetries = 5
+ }
+ if o.MaxRetryDelay == 0 {
+ o.MaxRetryDelay = 1 * time.Minute
+ }
+ if o.RetryDelay == 0 {
+ o.RetryDelay = 2 * time.Second
+ }
+ if o.StatusCodes == nil {
+ o.StatusCodes = []int{
+ // IMDS docs recommend retrying 404, 410, 429 and 5xx
+ // https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/how-to-use-vm-token#error-handling
+ http.StatusNotFound, // 404
+ http.StatusGone, // 410
+ http.StatusTooManyRequests, // 429
+ http.StatusInternalServerError, // 500
+ http.StatusNotImplemented, // 501
+ http.StatusBadGateway, // 502
+ http.StatusServiceUnavailable, // 503
+ http.StatusGatewayTimeout, // 504
+ http.StatusHTTPVersionNotSupported, // 505
+ http.StatusVariantAlsoNegotiates, // 506
+ http.StatusInsufficientStorage, // 507
+ http.StatusLoopDetected, // 508
+ http.StatusNotExtended, // 510
+ http.StatusNetworkAuthenticationRequired, // 511
+ }
+ }
+ if o.TryTimeout == 0 {
+ o.TryTimeout = 1 * time.Minute
+ }
+}
+
+// newManagedIdentityClient creates a new instance of the ManagedIdentityClient with the ManagedIdentityCredentialOptions
+// that are passed into it along with a default pipeline.
+// options: ManagedIdentityCredentialOptions configure policies for the pipeline and the authority host that
+// will be used to retrieve tokens and authenticate
+func newManagedIdentityClient(options *ManagedIdentityCredentialOptions) (*managedIdentityClient, error) {
+ if options == nil {
+ options = &ManagedIdentityCredentialOptions{}
+ }
+ cp := options.ClientOptions
+ c := managedIdentityClient{id: options.ID, endpoint: imdsEndpoint, msiType: msiTypeIMDS}
+ env := "IMDS"
+ if endpoint, ok := os.LookupEnv(identityEndpoint); ok {
+ if _, ok := os.LookupEnv(identityHeader); ok {
+ if _, ok := os.LookupEnv(identityServerThumbprint); ok {
+ env = "Service Fabric"
+ c.endpoint = endpoint
+ c.msiType = msiTypeServiceFabric
+ } else {
+ env = "App Service"
+ c.endpoint = endpoint
+ c.msiType = msiTypeAppService
+ }
+ } else if _, ok := os.LookupEnv(arcIMDSEndpoint); ok {
+ env = "Azure Arc"
+ c.endpoint = endpoint
+ c.msiType = msiTypeAzureArc
+ }
+ } else if endpoint, ok := os.LookupEnv(msiEndpoint); ok {
+ c.endpoint = endpoint
+ if _, ok := os.LookupEnv(msiSecret); ok {
+ env = "Azure ML"
+ c.msiType = msiTypeAzureML
+ } else {
+ env = "Cloud Shell"
+ c.msiType = msiTypeCloudShell
+ }
+ } else {
+ c.probeIMDS = options.dac
+ setIMDSRetryOptionDefaults(&cp.Retry)
+ }
+
+ client, err := azcore.NewClient(module, version, azruntime.PipelineOptions{
+ Tracing: azruntime.TracingOptions{
+ Namespace: traceNamespace,
+ },
+ }, &cp)
+ if err != nil {
+ return nil, err
+ }
+ c.azClient = client
+
+ if log.Should(EventAuthentication) {
+ log.Writef(EventAuthentication, "Managed Identity Credential will use %s managed identity", env)
+ }
+
+ return &c, nil
+}
+
+// provideToken acquires a token for MSAL's confidential.Client, which caches the token
+func (c *managedIdentityClient) provideToken(ctx context.Context, params confidential.TokenProviderParameters) (confidential.TokenProviderResult, error) {
+ result := confidential.TokenProviderResult{}
+ tk, err := c.authenticate(ctx, c.id, params.Scopes)
+ if err == nil {
+ result.AccessToken = tk.Token
+ result.ExpiresInSeconds = int(time.Until(tk.ExpiresOn).Seconds())
+ }
+ return result, err
+}
+
+// authenticate acquires an access token
+func (c *managedIdentityClient) authenticate(ctx context.Context, id ManagedIDKind, scopes []string) (azcore.AccessToken, error) {
+ // no need to synchronize around this value because it's true only when DefaultAzureCredential constructed the client,
+ // and in that case ChainedTokenCredential.GetToken synchronizes goroutines that would execute this block
+ if c.probeIMDS {
+ cx, cancel := context.WithTimeout(ctx, imdsProbeTimeout)
+ defer cancel()
+ cx = policy.WithRetryOptions(cx, policy.RetryOptions{MaxRetries: -1})
+ req, err := azruntime.NewRequest(cx, http.MethodGet, c.endpoint)
+ if err == nil {
+ _, err = c.azClient.Pipeline().Do(req)
+ }
+ if err != nil {
+ msg := err.Error()
+ if errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded) {
+ msg = "managed identity timed out. See https://aka.ms/azsdk/go/identity/troubleshoot#dac for more information"
+ }
+ return azcore.AccessToken{}, newCredentialUnavailableError(credNameManagedIdentity, msg)
+ }
+ // send normal token requests from now on because something responded
+ c.probeIMDS = false
+ }
+
+ msg, err := c.createAuthRequest(ctx, id, scopes)
+ if err != nil {
+ return azcore.AccessToken{}, err
+ }
+
+ resp, err := c.azClient.Pipeline().Do(msg)
+ if err != nil {
+ return azcore.AccessToken{}, newAuthenticationFailedError(credNameManagedIdentity, err.Error(), nil, err)
+ }
+
+ if azruntime.HasStatusCode(resp, http.StatusOK, http.StatusCreated) {
+ return c.createAccessToken(resp)
+ }
+
+ if c.msiType == msiTypeIMDS {
+ switch resp.StatusCode {
+ case http.StatusBadRequest:
+ if id != nil {
+ return azcore.AccessToken{}, newAuthenticationFailedError(credNameManagedIdentity, "the requested identity isn't assigned to this resource", resp, nil)
+ }
+ msg := "failed to authenticate a system assigned identity"
+ if body, err := azruntime.Payload(resp); err == nil && len(body) > 0 {
+ msg += fmt.Sprintf(". The endpoint responded with %s", body)
+ }
+ return azcore.AccessToken{}, newCredentialUnavailableError(credNameManagedIdentity, msg)
+ case http.StatusForbidden:
+ // Docker Desktop runs a proxy that responds 403 to IMDS token requests. If we get that response,
+ // we return credentialUnavailableError so credential chains continue to their next credential
+ body, err := azruntime.Payload(resp)
+ if err == nil && strings.Contains(string(body), "unreachable") {
+ return azcore.AccessToken{}, newCredentialUnavailableError(credNameManagedIdentity, fmt.Sprintf("unexpected response %q", string(body)))
+ }
+ }
+ }
+
+ return azcore.AccessToken{}, newAuthenticationFailedError(credNameManagedIdentity, "authentication failed", resp, nil)
+}
+
+func (c *managedIdentityClient) createAccessToken(res *http.Response) (azcore.AccessToken, error) {
+ value := struct {
+ // these are the only fields that we use
+ Token string `json:"access_token,omitempty"`
+ RefreshToken string `json:"refresh_token,omitempty"`
+ ExpiresIn wrappedNumber `json:"expires_in,omitempty"` // this field should always return the number of seconds for which a token is valid
+ ExpiresOn interface{} `json:"expires_on,omitempty"` // the value returned in this field varies between a number and a date string
+ }{}
+ if err := azruntime.UnmarshalAsJSON(res, &value); err != nil {
+ return azcore.AccessToken{}, fmt.Errorf("internal AccessToken: %v", err)
+ }
+ if value.ExpiresIn != "" {
+ expiresIn, err := json.Number(value.ExpiresIn).Int64()
+ if err != nil {
+ return azcore.AccessToken{}, err
+ }
+ return azcore.AccessToken{Token: value.Token, ExpiresOn: time.Now().Add(time.Second * time.Duration(expiresIn)).UTC()}, nil
+ }
+ switch v := value.ExpiresOn.(type) {
+ case float64:
+ return azcore.AccessToken{Token: value.Token, ExpiresOn: time.Unix(int64(v), 0).UTC()}, nil
+ case string:
+ if expiresOn, err := strconv.Atoi(v); err == nil {
+ return azcore.AccessToken{Token: value.Token, ExpiresOn: time.Unix(int64(expiresOn), 0).UTC()}, nil
+ }
+ return azcore.AccessToken{}, newAuthenticationFailedError(credNameManagedIdentity, "unexpected expires_on value: "+v, res, nil)
+ default:
+ msg := fmt.Sprintf("unsupported type received in expires_on: %T, %v", v, v)
+ return azcore.AccessToken{}, newAuthenticationFailedError(credNameManagedIdentity, msg, res, nil)
+ }
+}
+
+func (c *managedIdentityClient) createAuthRequest(ctx context.Context, id ManagedIDKind, scopes []string) (*policy.Request, error) {
+ switch c.msiType {
+ case msiTypeIMDS:
+ return c.createIMDSAuthRequest(ctx, id, scopes)
+ case msiTypeAppService:
+ return c.createAppServiceAuthRequest(ctx, id, scopes)
+ case msiTypeAzureArc:
+		// need to perform preliminary request to retrieve the secret key challenge provided by the HIMDS service
+ key, err := c.getAzureArcSecretKey(ctx, scopes)
+ if err != nil {
+ msg := fmt.Sprintf("failed to retreive secret key from the identity endpoint: %v", err)
+ return nil, newAuthenticationFailedError(credNameManagedIdentity, msg, nil, err)
+ }
+ return c.createAzureArcAuthRequest(ctx, id, scopes, key)
+ case msiTypeAzureML:
+ return c.createAzureMLAuthRequest(ctx, id, scopes)
+ case msiTypeServiceFabric:
+ return c.createServiceFabricAuthRequest(ctx, id, scopes)
+ case msiTypeCloudShell:
+ return c.createCloudShellAuthRequest(ctx, id, scopes)
+ default:
+ return nil, newCredentialUnavailableError(credNameManagedIdentity, "managed identity isn't supported in this environment")
+ }
+}
+
+func (c *managedIdentityClient) createIMDSAuthRequest(ctx context.Context, id ManagedIDKind, scopes []string) (*policy.Request, error) {
+ request, err := azruntime.NewRequest(ctx, http.MethodGet, c.endpoint)
+ if err != nil {
+ return nil, err
+ }
+ request.Raw().Header.Set(headerMetadata, "true")
+ q := request.Raw().URL.Query()
+ q.Add("api-version", imdsAPIVersion)
+ q.Add("resource", strings.Join(scopes, " "))
+ if id != nil {
+ if id.idKind() == miResourceID {
+ q.Add(msiResID, id.String())
+ } else {
+ q.Add(qpClientID, id.String())
+ }
+ }
+ request.Raw().URL.RawQuery = q.Encode()
+ return request, nil
+}
+
+func (c *managedIdentityClient) createAppServiceAuthRequest(ctx context.Context, id ManagedIDKind, scopes []string) (*policy.Request, error) {
+ request, err := azruntime.NewRequest(ctx, http.MethodGet, c.endpoint)
+ if err != nil {
+ return nil, err
+ }
+ request.Raw().Header.Set("X-IDENTITY-HEADER", os.Getenv(identityHeader))
+ q := request.Raw().URL.Query()
+ q.Add("api-version", "2019-08-01")
+ q.Add("resource", scopes[0])
+ if id != nil {
+ if id.idKind() == miResourceID {
+ q.Add(miResID, id.String())
+ } else {
+ q.Add(qpClientID, id.String())
+ }
+ }
+ request.Raw().URL.RawQuery = q.Encode()
+ return request, nil
+}
+
+func (c *managedIdentityClient) createAzureMLAuthRequest(ctx context.Context, id ManagedIDKind, scopes []string) (*policy.Request, error) {
+ request, err := azruntime.NewRequest(ctx, http.MethodGet, c.endpoint)
+ if err != nil {
+ return nil, err
+ }
+ request.Raw().Header.Set("secret", os.Getenv(msiSecret))
+ q := request.Raw().URL.Query()
+ q.Add("api-version", "2017-09-01")
+ q.Add("resource", strings.Join(scopes, " "))
+ q.Add("clientid", os.Getenv(defaultIdentityClientID))
+ if id != nil {
+ if id.idKind() == miResourceID {
+ log.Write(EventAuthentication, "WARNING: Azure ML doesn't support specifying a managed identity by resource ID")
+ q.Set("clientid", "")
+ q.Set(miResID, id.String())
+ } else {
+ q.Set("clientid", id.String())
+ }
+ }
+ request.Raw().URL.RawQuery = q.Encode()
+ return request, nil
+}
+
+func (c *managedIdentityClient) createServiceFabricAuthRequest(ctx context.Context, id ManagedIDKind, scopes []string) (*policy.Request, error) {
+ request, err := azruntime.NewRequest(ctx, http.MethodGet, c.endpoint)
+ if err != nil {
+ return nil, err
+ }
+ q := request.Raw().URL.Query()
+ request.Raw().Header.Set("Accept", "application/json")
+ request.Raw().Header.Set("Secret", os.Getenv(identityHeader))
+ q.Add("api-version", serviceFabricAPIVersion)
+ q.Add("resource", strings.Join(scopes, " "))
+ if id != nil {
+ log.Write(EventAuthentication, "WARNING: Service Fabric doesn't support selecting a user-assigned identity at runtime")
+ if id.idKind() == miResourceID {
+ q.Add(miResID, id.String())
+ } else {
+ q.Add(qpClientID, id.String())
+ }
+ }
+ request.Raw().URL.RawQuery = q.Encode()
+ return request, nil
+}
+
+func (c *managedIdentityClient) getAzureArcSecretKey(ctx context.Context, resources []string) (string, error) {
+	// create the request to retrieve the secret key challenge provided by the HIMDS service
+ request, err := azruntime.NewRequest(ctx, http.MethodGet, c.endpoint)
+ if err != nil {
+ return "", err
+ }
+ request.Raw().Header.Set(headerMetadata, "true")
+ q := request.Raw().URL.Query()
+ q.Add("api-version", azureArcAPIVersion)
+ q.Add("resource", strings.Join(resources, " "))
+ request.Raw().URL.RawQuery = q.Encode()
+ // send the initial request to get the short-lived secret key
+ response, err := c.azClient.Pipeline().Do(request)
+ if err != nil {
+ return "", err
+ }
+ // the endpoint is expected to return a 401 with the WWW-Authenticate header set to the location
+ // of the secret key file. Any other status code indicates an error in the request.
+ if response.StatusCode != 401 {
+ msg := fmt.Sprintf("expected a 401 response, received %d", response.StatusCode)
+ return "", newAuthenticationFailedError(credNameManagedIdentity, msg, response, nil)
+ }
+ header := response.Header.Get("WWW-Authenticate")
+ if len(header) == 0 {
+ return "", newAuthenticationFailedError(credNameManagedIdentity, "HIMDS response has no WWW-Authenticate header", nil, nil)
+ }
+ // the WWW-Authenticate header is expected in the following format: Basic realm=/some/file/path.key
+ _, p, found := strings.Cut(header, "=")
+ if !found {
+ return "", newAuthenticationFailedError(credNameManagedIdentity, "unexpected WWW-Authenticate header from HIMDS: "+header, nil, nil)
+ }
+ expected, err := arcKeyDirectory()
+ if err != nil {
+ return "", err
+ }
+ if filepath.Dir(p) != expected || !strings.HasSuffix(p, ".key") {
+ return "", newAuthenticationFailedError(credNameManagedIdentity, "unexpected file path from HIMDS service: "+p, nil, nil)
+ }
+ f, err := os.Stat(p)
+ if err != nil {
+ return "", newAuthenticationFailedError(credNameManagedIdentity, fmt.Sprintf("could not stat %q: %v", p, err), nil, nil)
+ }
+ if s := f.Size(); s > 4096 {
+ return "", newAuthenticationFailedError(credNameManagedIdentity, fmt.Sprintf("key is too large (%d bytes)", s), nil, nil)
+ }
+ key, err := os.ReadFile(p)
+ if err != nil {
+ return "", newAuthenticationFailedError(credNameManagedIdentity, fmt.Sprintf("could not read %q: %v", p, err), nil, nil)
+ }
+ return string(key), nil
+}
+
+func (c *managedIdentityClient) createAzureArcAuthRequest(ctx context.Context, id ManagedIDKind, resources []string, key string) (*policy.Request, error) {
+ request, err := azruntime.NewRequest(ctx, http.MethodGet, c.endpoint)
+ if err != nil {
+ return nil, err
+ }
+ request.Raw().Header.Set(headerMetadata, "true")
+ request.Raw().Header.Set("Authorization", fmt.Sprintf("Basic %s", key))
+ q := request.Raw().URL.Query()
+ q.Add("api-version", azureArcAPIVersion)
+ q.Add("resource", strings.Join(resources, " "))
+ if id != nil {
+ log.Write(EventAuthentication, "WARNING: Azure Arc doesn't support user-assigned managed identities")
+ if id.idKind() == miResourceID {
+ q.Add(miResID, id.String())
+ } else {
+ q.Add(qpClientID, id.String())
+ }
+ }
+ request.Raw().URL.RawQuery = q.Encode()
+ return request, nil
+}
+
+func (c *managedIdentityClient) createCloudShellAuthRequest(ctx context.Context, id ManagedIDKind, scopes []string) (*policy.Request, error) {
+ request, err := azruntime.NewRequest(ctx, http.MethodPost, c.endpoint)
+ if err != nil {
+ return nil, err
+ }
+ request.Raw().Header.Set(headerMetadata, "true")
+ data := url.Values{}
+ data.Set("resource", strings.Join(scopes, " "))
+ dataEncoded := data.Encode()
+ body := streaming.NopCloser(strings.NewReader(dataEncoded))
+ if err := request.SetBody(body, "application/x-www-form-urlencoded"); err != nil {
+ return nil, err
+ }
+ if id != nil {
+ log.Write(EventAuthentication, "WARNING: Cloud Shell doesn't support user-assigned managed identities")
+ q := request.Raw().URL.Query()
+ if id.idKind() == miResourceID {
+ q.Add(miResID, id.String())
+ } else {
+ q.Add(qpClientID, id.String())
+ }
+ }
+ return request, nil
+}
@@ -0,0 +1,128 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential"
+)
+
+const credNameManagedIdentity = "ManagedIdentityCredential"
+
+type managedIdentityIDKind int
+
+const (
+ miClientID managedIdentityIDKind = 0
+ miResourceID managedIdentityIDKind = 1
+)
+
+// ManagedIDKind identifies the ID of a managed identity as either a client or resource ID
+type ManagedIDKind interface {
+ fmt.Stringer
+ idKind() managedIdentityIDKind
+}
+
+// ClientID is the client ID of a user-assigned managed identity.
+type ClientID string
+
+func (ClientID) idKind() managedIdentityIDKind {
+ return miClientID
+}
+
+// String returns the string value of the ID.
+func (c ClientID) String() string {
+ return string(c)
+}
+
+// ResourceID is the resource ID of a user-assigned managed identity.
+type ResourceID string
+
+func (ResourceID) idKind() managedIdentityIDKind {
+ return miResourceID
+}
+
+// String returns the string value of the ID.
+func (r ResourceID) String() string {
+ return string(r)
+}
+
+// ManagedIdentityCredentialOptions contains optional parameters for ManagedIdentityCredential.
+type ManagedIdentityCredentialOptions struct {
+ azcore.ClientOptions
+
+ // ID is the ID of a managed identity the credential should authenticate. Set this field to use a specific identity
+ // instead of the hosting environment's default. The value may be the identity's client ID or resource ID, but note that
+ // some platforms don't accept resource IDs.
+ ID ManagedIDKind
+
+ // dac indicates whether the credential is part of DefaultAzureCredential. When true, and the environment doesn't have
+ // configuration for a specific managed identity API, the credential tries to determine whether IMDS is available before
+ // sending its first token request. It does this by sending a malformed request with a short timeout. Any response to that
+ // request is taken to mean IMDS is available, in which case the credential will send ordinary token requests thereafter
+ // with no special timeout. The purpose of this behavior is to prevent a very long timeout when IMDS isn't available.
+ dac bool
+}
+
+// ManagedIdentityCredential authenticates an Azure managed identity in any hosting environment supporting managed identities.
+// This credential authenticates a system-assigned identity by default. Use ManagedIdentityCredentialOptions.ID to specify a
+// user-assigned identity. See Microsoft Entra ID documentation for more information about managed identities:
+// https://learn.microsoft.com/entra/identity/managed-identities-azure-resources/overview
+type ManagedIdentityCredential struct {
+ client *confidentialClient
+ mic *managedIdentityClient
+}
+
+// NewManagedIdentityCredential creates a ManagedIdentityCredential. Pass nil to accept default options.
+func NewManagedIdentityCredential(options *ManagedIdentityCredentialOptions) (*ManagedIdentityCredential, error) {
+ if options == nil {
+ options = &ManagedIdentityCredentialOptions{}
+ }
+ mic, err := newManagedIdentityClient(options)
+ if err != nil {
+ return nil, err
+ }
+ cred := confidential.NewCredFromTokenProvider(mic.provideToken)
+
+ // It's okay to give MSAL an invalid client ID because MSAL will use it only as part of a cache key.
+ // ManagedIdentityClient handles all the details of authentication and won't receive this value from MSAL.
+ clientID := "SYSTEM-ASSIGNED-MANAGED-IDENTITY"
+ if options.ID != nil {
+ clientID = options.ID.String()
+ }
+ // similarly, it's okay to give MSAL an incorrect tenant because MSAL won't use the value
+ c, err := newConfidentialClient("common", clientID, credNameManagedIdentity, cred, confidentialClientOptions{
+ ClientOptions: options.ClientOptions,
+ })
+ if err != nil {
+ return nil, err
+ }
+ return &ManagedIdentityCredential{client: c, mic: mic}, nil
+}
+
+// GetToken requests an access token from the hosting environment. This method is called automatically by Azure SDK clients.
+func (c *ManagedIdentityCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+ var err error
+ ctx, endSpan := runtime.StartSpan(ctx, credNameManagedIdentity+"."+traceOpGetToken, c.client.azClient.Tracer(), nil)
+ defer func() { endSpan(err) }()
+
+ if len(opts.Scopes) != 1 {
+ err = fmt.Errorf("%s.GetToken() requires exactly one scope", credNameManagedIdentity)
+ return azcore.AccessToken{}, err
+ }
+ // managed identity endpoints require a Microsoft Entra ID v1 resource (i.e. token audience), not a v2 scope, so we remove "/.default" here
+ opts.Scopes = []string{strings.TrimSuffix(opts.Scopes[0], defaultSuffix)}
+ tk, err := c.client.GetToken(ctx, opts)
+ return tk, err
+}
+
+var _ azcore.TokenCredential = (*ManagedIdentityCredential)(nil)
@@ -0,0 +1,113 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "crypto"
+ "crypto/x509"
+ "errors"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/confidential"
+)
+
+// credNameOBO identifies this credential in tracing spans and is passed to the confidential client.
+const credNameOBO = "OnBehalfOfCredential"
+
+// OnBehalfOfCredential authenticates a service principal via the on-behalf-of flow. This is typically used by
+// middle-tier services that authorize requests to other services with a delegated user identity. Because this
+// is not an interactive authentication flow, an application using it must have admin consent for any delegated
+// permissions before requesting tokens for them. See [Microsoft Entra ID documentation] for more details.
+//
+// [Microsoft Entra ID documentation]: https://learn.microsoft.com/entra/identity-platform/v2-oauth2-on-behalf-of-flow
+type OnBehalfOfCredential struct {
+	client *confidentialClient
+}
+
+// OnBehalfOfCredentialOptions contains optional parameters for OnBehalfOfCredential.
+// The zero value is a valid default configuration.
+type OnBehalfOfCredentialOptions struct {
+	azcore.ClientOptions
+
+	// AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire tokens.
+	// Add the wildcard value "*" to allow the credential to acquire tokens for any tenant in which the
+	// application is registered.
+	AdditionallyAllowedTenants []string
+
+	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
+	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
+	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
+	// the application responsible for ensuring the configured authority is valid and trustworthy.
+	DisableInstanceDiscovery bool
+
+	// SendCertificateChain applies only when the credential is configured to authenticate with a certificate.
+	// This setting controls whether the credential sends the public certificate chain in the x5c header of each
+	// token request's JWT. This is required for, and only used in, Subject Name/Issuer (SNI) authentication.
+	SendCertificateChain bool
+}
+
+// NewOnBehalfOfCredentialWithCertificate constructs an OnBehalfOfCredential that authenticates with a certificate.
+// See [ParseCertificates] for help loading a certificate.
+func NewOnBehalfOfCredentialWithCertificate(tenantID, clientID, userAssertion string, certs []*x509.Certificate, key crypto.PrivateKey, options *OnBehalfOfCredentialOptions) (*OnBehalfOfCredential, error) {
+	// MSAL derives the client credential from the certificate chain and its private key
+	cred, err := confidential.NewCredFromCert(certs, key)
+	if err != nil {
+		return nil, err
+	}
+	return newOnBehalfOfCredential(tenantID, clientID, userAssertion, cred, options)
+}
+
+// NewOnBehalfOfCredentialWithClientAssertions constructs an OnBehalfOfCredential that authenticates with client assertions.
+// userAssertion is the user's access token for the application. The getAssertion function should return client assertions
+// that authenticate the application to Microsoft Entra ID, such as federated credentials.
+func NewOnBehalfOfCredentialWithClientAssertions(tenantID, clientID, userAssertion string, getAssertion func(context.Context) (string, error), options *OnBehalfOfCredentialOptions) (*OnBehalfOfCredential, error) {
+	if getAssertion == nil {
+		return nil, errors.New("getAssertion can't be nil. It must be a function that returns client assertions")
+	}
+	// adapt the caller's callback to MSAL's assertion-provider interface; MSAL's request options are ignored
+	cred := confidential.NewCredFromAssertionCallback(func(ctx context.Context, _ confidential.AssertionRequestOptions) (string, error) {
+		return getAssertion(ctx)
+	})
+	return newOnBehalfOfCredential(tenantID, clientID, userAssertion, cred, options)
+}
+
+// NewOnBehalfOfCredentialWithSecret constructs an OnBehalfOfCredential that authenticates with a client secret.
+func NewOnBehalfOfCredentialWithSecret(tenantID, clientID, userAssertion, clientSecret string, options *OnBehalfOfCredentialOptions) (*OnBehalfOfCredential, error) {
+	cred, err := confidential.NewCredFromSecret(clientSecret)
+	if err != nil {
+		return nil, err
+	}
+	return newOnBehalfOfCredential(tenantID, clientID, userAssertion, cred, options)
+}
+
+// newOnBehalfOfCredential is the shared constructor behind the three public NewOnBehalfOfCredential* functions.
+// cred carries the application's proof of identity (certificate, assertion callback, or secret).
+func newOnBehalfOfCredential(tenantID, clientID, userAssertion string, cred confidential.Credential, options *OnBehalfOfCredentialOptions) (*OnBehalfOfCredential, error) {
+	if options == nil {
+		options = &OnBehalfOfCredentialOptions{}
+	}
+	// map the public options onto the internal confidential client configuration;
+	// Assertion carries the user's access token that this flow exchanges for a new token
+	opts := confidentialClientOptions{
+		AdditionallyAllowedTenants: options.AdditionallyAllowedTenants,
+		Assertion:                  userAssertion,
+		ClientOptions:              options.ClientOptions,
+		DisableInstanceDiscovery:   options.DisableInstanceDiscovery,
+		SendX5C:                    options.SendCertificateChain,
+	}
+	c, err := newConfidentialClient(tenantID, clientID, credNameOBO, cred, opts)
+	if err != nil {
+		return nil, err
+	}
+	return &OnBehalfOfCredential{c}, nil
+}
+
+// GetToken requests an access token from Microsoft Entra ID. This method is called automatically by Azure SDK clients.
+func (o *OnBehalfOfCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	// err is declared before the span so the deferred closure reports the final error to the span
+	var err error
+	ctx, endSpan := runtime.StartSpan(ctx, credNameOBO+"."+traceOpGetToken, o.client.azClient.Tracer(), nil)
+	defer func() { endSpan(err) }()
+	tk, err := o.client.GetToken(ctx, opts)
+	return tk, err
+}
+
+// compile-time check that OnBehalfOfCredential implements azcore.TokenCredential
+var _ azcore.TokenCredential = (*OnBehalfOfCredential)(nil)
@@ -0,0 +1,273 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+ "sync"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/cloud"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+ "github.com/Azure/azure-sdk-for-go/sdk/azidentity/internal"
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/log"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/public"
+
+ // this import ensures well-known configurations in azcore/cloud have ARM audiences for Authenticate()
+ _ "github.com/Azure/azure-sdk-for-go/sdk/azcore/arm/runtime"
+)
+
+// publicClientOptions aggregates the options of every public-client-based credential in this
+// package; each credential sets only the fields relevant to its own flow.
+type publicClientOptions struct {
+	azcore.ClientOptions
+
+	AdditionallyAllowedTenants []string
+	// DeviceCodePrompt is invoked with the user instructions during the device code flow.
+	DeviceCodePrompt func(context.Context, DeviceCodeMessage) error
+	// DisableAutomaticAuthentication makes GetToken return an error instead of
+	// falling back to a full authentication when no cached token is available.
+	DisableAutomaticAuthentication bool
+	DisableInstanceDiscovery       bool
+	LoginHint, RedirectURL         string
+	// Record seeds the client with the result of a previous authentication.
+	Record                       authenticationRecord
+	TokenCachePersistenceOptions *tokenCachePersistenceOptions
+	Username, Password           string
+}
+
+// publicClient wraps the MSAL public client
+type publicClient struct {
+	// cae and noCAE are MSAL clients with and without CAE enabled; both are built lazily by client()
+	cae, noCAE msalPublicClient
+	// caeMu and noCAEMu serialize token requests on the corresponding MSAL client;
+	// clientMu guards the lazy construction of cae and noCAE
+	caeMu, noCAEMu, clientMu *sync.Mutex
+	clientID, tenantID       string
+	// defaultScope is used by Authenticate() when the caller specifies no scope; may be nil
+	defaultScope []string
+	host         string
+	name         string
+	opts         publicClientOptions
+	// record identifies the authenticated account; updated after each successful token acquisition
+	record   authenticationRecord
+	azClient *azcore.Client
+}
+
+// errScopeRequired is returned by Authenticate when the caller specifies no scope
+// and no default scope could be determined from the cloud configuration.
+var errScopeRequired = errors.New("authenticating in this environment requires specifying a scope in TokenRequestOptions")
+
+// newPublicClient validates the tenant, determines a default scope for Authenticate() from the
+// cloud configuration, and builds the azcore pipeline the MSAL client will send requests through.
+func newPublicClient(tenantID, clientID, name string, o publicClientOptions) (*publicClient, error) {
+	if !validTenantID(tenantID) {
+		return nil, errInvalidTenantID
+	}
+	host, err := setAuthorityHost(o.Cloud)
+	if err != nil {
+		return nil, err
+	}
+	// if the application specified a cloud configuration, use its ARM audience as the default scope for Authenticate()
+	audience := o.Cloud.Services[cloud.ResourceManager].Audience
+	if audience == "" {
+		// no cloud configuration, or no ARM audience, specified; try to map the host to a well-known one (all of which have a trailing slash)
+		if !strings.HasSuffix(host, "/") {
+			host += "/"
+		}
+		switch host {
+		case cloud.AzureChina.ActiveDirectoryAuthorityHost:
+			audience = cloud.AzureChina.Services[cloud.ResourceManager].Audience
+		case cloud.AzureGovernment.ActiveDirectoryAuthorityHost:
+			audience = cloud.AzureGovernment.Services[cloud.ResourceManager].Audience
+		case cloud.AzurePublic.ActiveDirectoryAuthorityHost:
+			audience = cloud.AzurePublic.Services[cloud.ResourceManager].Audience
+		}
+	}
+	// if we didn't come up with an audience, the application will have to specify a scope for Authenticate()
+	var defaultScope []string
+	if audience != "" {
+		defaultScope = []string{audience + defaultSuffix}
+	}
+	// azClient supplies the pipeline (tracing included) for requests MSAL sends via Do()
+	client, err := azcore.NewClient(module, version, runtime.PipelineOptions{
+		Tracing: runtime.TracingOptions{
+			Namespace: traceNamespace,
+		},
+	}, &o.ClientOptions)
+	if err != nil {
+		return nil, err
+	}
+	o.AdditionallyAllowedTenants = resolveAdditionalTenants(o.AdditionallyAllowedTenants)
+	return &publicClient{
+		caeMu:        &sync.Mutex{},
+		clientID:     clientID,
+		clientMu:     &sync.Mutex{},
+		defaultScope: defaultScope,
+		host:         host,
+		name:         name,
+		noCAEMu:      &sync.Mutex{},
+		opts:         o,
+		record:       o.Record,
+		tenantID:     tenantID,
+		azClient:     client,
+	}, nil
+}
+
+// Authenticate acquires a token without consulting the cache (it calls reqToken directly), so it
+// always performs a full authentication, then returns the resulting authentication record.
+func (p *publicClient) Authenticate(ctx context.Context, tro *policy.TokenRequestOptions) (authenticationRecord, error) {
+	if tro == nil {
+		tro = &policy.TokenRequestOptions{}
+	}
+	// fall back to the default scope computed in newPublicClient; error if there isn't one
+	if len(tro.Scopes) == 0 {
+		if p.defaultScope == nil {
+			return authenticationRecord{}, errScopeRequired
+		}
+		tro.Scopes = p.defaultScope
+	}
+	client, mu, err := p.client(*tro)
+	if err != nil {
+		return authenticationRecord{}, err
+	}
+	// serialize token requests on this MSAL client
+	mu.Lock()
+	defer mu.Unlock()
+	// reqToken updates p.record on success (via token()); the access token itself is discarded here
+	_, err = p.reqToken(ctx, client, *tro)
+	if err == nil {
+		scope := strings.Join(tro.Scopes, ", ")
+		msg := fmt.Sprintf("%s.Authenticate() acquired a token for scope %q", p.name, scope)
+		log.Write(EventAuthentication, msg)
+	}
+	return p.record, err
+}
+
+// GetToken requests an access token from MSAL, checking the cache first. When the cache has no
+// token for the request, it falls back to a full authentication unless the application disabled
+// automatic authentication, in which case it returns an authenticationRequiredError.
+func (p *publicClient) GetToken(ctx context.Context, tro policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	if len(tro.Scopes) < 1 {
+		return azcore.AccessToken{}, fmt.Errorf("%s.GetToken() requires at least one scope", p.name)
+	}
+	tenant, err := p.resolveTenant(tro.TenantID)
+	if err != nil {
+		return azcore.AccessToken{}, err
+	}
+	client, mu, err := p.client(tro)
+	if err != nil {
+		return azcore.AccessToken{}, err
+	}
+	// serialize token requests on this MSAL client
+	mu.Lock()
+	defer mu.Unlock()
+	ar, err := client.AcquireTokenSilent(ctx, tro.Scopes, public.WithSilentAccount(p.record.account()), public.WithClaims(tro.Claims), public.WithTenantID(tenant))
+	if err == nil {
+		return p.token(ar, err)
+	}
+	// the cache couldn't satisfy the request; authenticate, unless the application opted out
+	if p.opts.DisableAutomaticAuthentication {
+		return azcore.AccessToken{}, newauthenticationRequiredError(p.name, tro)
+	}
+	at, err := p.reqToken(ctx, client, tro)
+	if err == nil {
+		// log the requested scopes: ar is the zero AuthResult here because AcquireTokenSilent
+		// failed above, so ar.GrantedScopes would always be empty. This matches Authenticate()'s logging.
+		msg := fmt.Sprintf("%s.GetToken() acquired a token for scope %q", p.name, strings.Join(tro.Scopes, ", "))
+		log.Write(EventAuthentication, msg)
+	}
+	return at, err
+}
+
+// reqToken requests a token from the MSAL public client. It's separate from GetToken() to enable Authenticate() to bypass the cache.
+// The credential name selects the flow: interactive browser, device code, or username/password.
+func (p *publicClient) reqToken(ctx context.Context, c msalPublicClient, tro policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	tenant, err := p.resolveTenant(tro.TenantID)
+	if err != nil {
+		return azcore.AccessToken{}, err
+	}
+	var ar public.AuthResult
+	switch p.name {
+	case credNameBrowser:
+		// interactive flow: opens the system browser (handled by MSAL)
+		ar, err = c.AcquireTokenInteractive(ctx, tro.Scopes,
+			public.WithClaims(tro.Claims),
+			public.WithLoginHint(p.opts.LoginHint),
+			public.WithRedirectURI(p.opts.RedirectURL),
+			public.WithTenantID(tenant),
+		)
+	case credNameDeviceCode:
+		// device code flow: show the user the code via the configured prompt, then poll for the result
+		dc, e := c.AcquireTokenByDeviceCode(ctx, tro.Scopes, public.WithClaims(tro.Claims), public.WithTenantID(tenant))
+		if e != nil {
+			return azcore.AccessToken{}, e
+		}
+		err = p.opts.DeviceCodePrompt(ctx, DeviceCodeMessage{
+			Message:         dc.Result.Message,
+			UserCode:        dc.Result.UserCode,
+			VerificationURL: dc.Result.VerificationURL,
+		})
+		if err == nil {
+			ar, err = dc.AuthenticationResult(ctx)
+		}
+	case credNameUserPassword:
+		ar, err = c.AcquireTokenByUsernamePassword(ctx, tro.Scopes, p.opts.Username, p.opts.Password, public.WithClaims(tro.Claims), public.WithTenantID(tenant))
+	default:
+		return azcore.AccessToken{}, fmt.Errorf("unknown credential %q", p.name)
+	}
+	return p.token(ar, err)
+}
+
+// client returns the MSAL client matching the request's CAE preference, plus the mutex callers
+// must hold while using it. Clients are constructed lazily, at most once each, under clientMu.
+func (p *publicClient) client(tro policy.TokenRequestOptions) (msalPublicClient, *sync.Mutex, error) {
+	p.clientMu.Lock()
+	defer p.clientMu.Unlock()
+	if tro.EnableCAE {
+		if p.cae == nil {
+			client, err := p.newMSALClient(true)
+			if err != nil {
+				return nil, nil, err
+			}
+			p.cae = client
+		}
+		return p.cae, p.caeMu, nil
+	}
+	if p.noCAE == nil {
+		client, err := p.newMSALClient(false)
+		if err != nil {
+			return nil, nil, err
+		}
+		p.noCAE = client
+	}
+	return p.noCAE, p.noCAEMu, nil
+}
+
+// newMSALClient builds an MSAL public client configured with this credential's authority,
+// token cache, and HTTP transport (p itself, which routes requests through the azcore pipeline).
+func (p *publicClient) newMSALClient(enableCAE bool) (msalPublicClient, error) {
+	cache, err := internal.NewCache(p.opts.TokenCachePersistenceOptions, enableCAE)
+	if err != nil {
+		return nil, err
+	}
+	o := []public.Option{
+		public.WithAuthority(runtime.JoinPaths(p.host, p.tenantID)),
+		public.WithCache(cache),
+		public.WithHTTPClient(p),
+	}
+	if enableCAE {
+		// advertise the CAE client capability (cp1 is defined elsewhere in this package)
+		o = append(o, public.WithClientCapabilities(cp1))
+	}
+	// ADFS authorities don't support instance discovery
+	if p.opts.DisableInstanceDiscovery || strings.ToLower(p.tenantID) == "adfs" {
+		o = append(o, public.WithInstanceDiscovery(false))
+	}
+	return public.New(p.clientID, o...)
+}
+
+// token converts an MSAL AuthResult into an azcore.AccessToken, updating the client's
+// authentication record on success and wrapping the error on failure.
+func (p *publicClient) token(ar public.AuthResult, err error) (azcore.AccessToken, error) {
+	if err == nil {
+		p.record, err = newAuthenticationRecord(ar)
+	} else {
+		res := getResponseFromError(err)
+		err = newAuthenticationFailedError(p.name, err.Error(), res, err)
+	}
+	// when err is non-nil the returned token isn't meaningful; callers must check err first
+	return azcore.AccessToken{Token: ar.AccessToken, ExpiresOn: ar.ExpiresOn.UTC()}, err
+}
+
+// resolveTenant returns the correct WithTenantID() argument for a token request given the client's
+// configuration, or an error when that configuration doesn't allow the specified tenant
+func (p *publicClient) resolveTenant(specified string) (string, error) {
+	t, err := resolveTenant(p.tenantID, specified, p.name, p.opts.AdditionallyAllowedTenants)
+	if t == p.tenantID {
+		// callers pass this value to MSAL's WithTenantID(). There's no need to redundantly specify
+		// the client's default tenant and doing so is an error when that tenant is "organizations"
+		t = ""
+	}
+	return t, err
+}
+
+// these methods satisfy the MSAL ops.HTTPClient interface
+
+// CloseIdleConnections is a no-op; connection management is delegated to the azcore client.
+func (p *publicClient) CloseIdleConnections() {
+	// do nothing
+}
+
+// Do sends MSAL's HTTP requests through the credential's azcore pipeline.
+func (p *publicClient) Do(r *http.Request) (*http.Response, error) {
+	return doForClient(p.azClient, r)
+}
@@ -0,0 +1,112 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+# IMPORTANT: Do not invoke this file directly. Please instead run eng/common/TestResources/New-TestResources.ps1 from the repository root.
+
+# Post-provisioning: builds the managed identity test app image and deploys it to
+# ACI, Azure Functions, and AKS using the resources created by the bicep template.
+param (
+    [hashtable] $AdditionalParameters = @{},
+    [hashtable] $DeploymentOutputs
+)
+
+# Fail fast: stop on any error, including native command failures.
+$ErrorActionPreference = 'Stop'
+$PSNativeCommandUseErrorActionPreference = $true
+
+if ($CI) {
+    if (!$AdditionalParameters['deployResources']) {
+        Write-Host "Skipping post-provisioning script because resources weren't deployed"
+        return
+    }
+    # Authenticate the CI agent with the service principal provisioned for this test run.
+    az login --service-principal -u $DeploymentOutputs['AZIDENTITY_CLIENT_ID'] -p $DeploymentOutputs['AZIDENTITY_CLIENT_SECRET'] --tenant $DeploymentOutputs['AZIDENTITY_TENANT_ID']
+    az account set --subscription $DeploymentOutputs['AZIDENTITY_SUBSCRIPTION_ID']
+}
+
+Write-Host "Building container"
+$image = "$($DeploymentOutputs['AZIDENTITY_ACR_LOGIN_SERVER'])/azidentity-managed-id-test"
+# Write the Dockerfile next to this script; it builds Linux and Windows binaries of the test app.
+Set-Content -Path "$PSScriptRoot/Dockerfile" -Value @"
+FROM mcr.microsoft.com/oss/go/microsoft/golang:latest as builder
+ENV GOARCH=amd64 GOWORK=off
+COPY . /azidentity
+WORKDIR /azidentity/testdata/managed-id-test
+RUN go mod tidy
+RUN go build -o /build/managed-id-test .
+RUN GOOS=windows go build -o /build/managed-id-test.exe .
+
+FROM mcr.microsoft.com/mirror/docker/library/alpine:3.16
+RUN apk add gcompat
+COPY --from=builder /build/* .
+RUN chmod +x managed-id-test
+CMD ["./managed-id-test"]
+"@
+# build from sdk/azidentity because we need that dir in the context (because the test app uses local azidentity)
+docker build -t $image "$PSScriptRoot"
+az acr login -n $DeploymentOutputs['AZIDENTITY_ACR_NAME']
+docker push $image
+
+$rg = $DeploymentOutputs['AZIDENTITY_RESOURCE_GROUP']
+
+# ACI is easier to provision here than in the bicep file because the image isn't available before now
+Write-Host "Deploying Azure Container Instance"
+$aciName = "azidentity-test"
+az container create -g $rg -n $aciName --image $image `
+  --acr-identity $($DeploymentOutputs['AZIDENTITY_USER_ASSIGNED_IDENTITY']) `
+  --assign-identity [system] $($DeploymentOutputs['AZIDENTITY_USER_ASSIGNED_IDENTITY']) `
+  --role "Storage Blob Data Reader" `
+  --scope $($DeploymentOutputs['AZIDENTITY_STORAGE_ID']) `
+  -e AZIDENTITY_STORAGE_NAME=$($DeploymentOutputs['AZIDENTITY_STORAGE_NAME']) `
+  AZIDENTITY_STORAGE_NAME_USER_ASSIGNED=$($DeploymentOutputs['AZIDENTITY_STORAGE_NAME_USER_ASSIGNED']) `
+  AZIDENTITY_USER_ASSIGNED_IDENTITY=$($DeploymentOutputs['AZIDENTITY_USER_ASSIGNED_IDENTITY']) `
+  FUNCTIONS_CUSTOMHANDLER_PORT=80
+# Expose the ACI name to subsequent pipeline steps.
+Write-Host "##vso[task.setvariable variable=AZIDENTITY_ACI_NAME;]$aciName"
+
+# Azure Functions deployment: copy the Windows binary from the Docker image, deploy it in a zip
+Write-Host "Deploying to Azure Functions"
+$container = docker create $image
+docker cp ${container}:managed-id-test.exe "$PSScriptRoot/testdata/managed-id-test/"
+docker rm -v $container
+Compress-Archive -Path "$PSScriptRoot/testdata/managed-id-test/*" -DestinationPath func.zip -Force
+az functionapp deploy -g $rg -n $DeploymentOutputs['AZIDENTITY_FUNCTION_NAME'] --src-path func.zip --type zip
+
+# Federate the user-assigned identity with the AKS cluster's OIDC issuer so pods using the
+# service account below can authenticate as that identity (workload identity).
+Write-Host "Creating federated identity"
+$aksName = $DeploymentOutputs['AZIDENTITY_AKS_NAME']
+$idName = $DeploymentOutputs['AZIDENTITY_USER_ASSIGNED_IDENTITY_NAME']
+$issuer = az aks show -g $rg -n $aksName --query "oidcIssuerProfile.issuerUrl" -otsv
+$podName = "azidentity-test"
+$serviceAccountName = "workload-identity-sa"
+az identity federated-credential create -g $rg --identity-name $idName --issuer $issuer --name $idName --subject system:serviceaccount:default:$serviceAccountName
+Write-Host "Deploying to AKS"
+az aks get-credentials -g $rg -n $aksName
+az aks update --attach-acr $DeploymentOutputs['AZIDENTITY_ACR_NAME'] -g $rg -n $aksName
+# Manifest: a service account annotated with the identity's client ID, and a pod running the test image.
+Set-Content -Path "$PSScriptRoot/k8s.yaml" -Value @"
+apiVersion: v1
+kind: ServiceAccount
+metadata:
+  annotations:
+    azure.workload.identity/client-id: $($DeploymentOutputs['AZIDENTITY_USER_ASSIGNED_IDENTITY_CLIENT_ID'])
+  name: $serviceAccountName
+  namespace: default
+---
+apiVersion: v1
+kind: Pod
+metadata:
+  name: $podName
+  namespace: default
+  labels:
+    app: $podName
+    azure.workload.identity/use: "true"
+spec:
+  serviceAccountName: $serviceAccountName
+  containers:
+  - name: $podName
+    image: $image
+    env:
+    - name: AZIDENTITY_STORAGE_NAME
+      value: $($DeploymentOutputs['AZIDENTITY_STORAGE_NAME_USER_ASSIGNED'])
+    - name: AZIDENTITY_USE_WORKLOAD_IDENTITY
+      value: "true"
+    - name: FUNCTIONS_CUSTOMHANDLER_PORT
+      value: "80"
+  nodeSelector:
+    kubernetes.io/os: linux
+"@
+kubectl apply -f "$PSScriptRoot/k8s.yaml"
+Write-Host "##vso[task.setvariable variable=AZIDENTITY_POD_NAME;]$podName"
@@ -0,0 +1,44 @@
+# Copyright (c) Microsoft Corporation. All rights reserved.
+# Licensed under the MIT License.
+
+# IMPORTANT: Do not invoke this file directly. Please instead run eng/common/TestResources/New-TestResources.ps1 from the repository root.
+
+# Pre-provisioning: generates an SSH key for the AKS admin user and, in CI, writes the
+# service principal certificate files the identity tests load.
+[CmdletBinding(SupportsShouldProcess = $true, ConfirmImpact = 'Medium')]
+param (
+    [hashtable] $AdditionalParameters = @{},
+
+    # Captures any arguments from eng/New-TestResources.ps1 not declared here (no parameter errors).
+    [Parameter(ValueFromRemainingArguments = $true)]
+    $RemainingArguments
+)
+
+# Generate a key pair for the AKS cluster's admin user if one doesn't already exist,
+# and pass the public key to the bicep template's sshPubKey parameter.
+if (-not (Test-Path "$PSScriptRoot/sshkey.pub")) {
+    ssh-keygen -t rsa -b 4096 -f "$PSScriptRoot/sshkey" -N '' -C ''
+}
+$templateFileParameters['sshPubKey'] = Get-Content "$PSScriptRoot/sshkey.pub"
+
+if (!$CI) {
+    # TODO: Remove this once auto-cloud config downloads are supported locally
+    Write-Host "Skipping cert setup in local testing mode"
+    return
+}
+
+if ($null -eq $EnvironmentVariables -or $EnvironmentVariables.Count -eq 0) {
+    throw "EnvironmentVariables must be set in the calling script New-TestResources.ps1"
+}
+
+# $env:TEMP is set on Windows; fall back to the platform temp path elsewhere.
+$tmp = $env:TEMP ? $env:TEMP : [System.IO.Path]::GetTempPath()
+$pfxPath = Join-Path $tmp "test.pfx"
+$pemPath = Join-Path $tmp "test.pem"
+
+Write-Host "Creating identity test files: $pfxPath $pemPath"
+
+# Decode the base64 PFX to binary and write the PEM contents verbatim.
+[System.Convert]::FromBase64String($EnvironmentVariables['PFX_CONTENTS']) | Set-Content -Path $pfxPath -AsByteStream
+Set-Content -Path $pemPath -Value $EnvironmentVariables['PEM_CONTENTS']
+
+# Set for pipeline
+Write-Host "##vso[task.setvariable variable=IDENTITY_SP_CERT_PFX;]$pfxPath"
+Write-Host "##vso[task.setvariable variable=IDENTITY_SP_CERT_PEM;]$pemPath"
+# Set for local
+$env:IDENTITY_SP_CERT_PFX = $pfxPath
+$env:IDENTITY_SP_CERT_PEM = $pemPath
@@ -0,0 +1,219 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT License.
+
+// Provisions the resources exercised by azidentity's managed identity and workload
+// identity live tests: storage accounts, a user-assigned identity, ACR, a Function
+// app, and an AKS cluster. Everything is gated on deployResources.
+
+@description('Kubernetes cluster admin user name.')
+param adminUser string = 'azureuser'
+
+@minLength(6)
+@maxLength(23)
+@description('The base resource name.')
+param baseName string = resourceGroup().name
+
+@description('Whether to deploy resources. When set to false, this file deploys nothing.')
+param deployResources bool = false
+
+// Public key for the AKS admin user; generated and supplied by the pre-provisioning script.
+param sshPubKey string = ''
+
+@description('The location of the resource. By default, this is the same as the resource group.')
+param location string = resourceGroup().location
+
+// https://learn.microsoft.com/azure/role-based-access-control/built-in-roles
+var acrPull = subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '7f951dda-4ed3-4680-a7ca-43fe172d538d')
+var blobReader = subscriptionResourceId('Microsoft.Authorization/roleDefinitions', '2a2b9908-6ea1-4ae2-8e65-a410df84e7d1')
+
+// Storage account read via system-assigned identities in the tests.
+resource sa 'Microsoft.Storage/storageAccounts@2021-08-01' = if (deployResources) {
+  kind: 'StorageV2'
+  location: location
+  name: 'sa${uniqueString(baseName)}'
+  properties: {
+    accessTier: 'Hot'
+  }
+  sku: {
+    name: 'Standard_LRS'
+  }
+}
+
+// Second storage account, read via the user-assigned identity.
+resource saUserAssigned 'Microsoft.Storage/storageAccounts@2021-08-01' = if (deployResources) {
+  kind: 'StorageV2'
+  location: location
+  name: 'sa2${uniqueString(baseName)}'
+  properties: {
+    accessTier: 'Hot'
+  }
+  sku: {
+    name: 'Standard_LRS'
+  }
+}
+
+resource usermgdid 'Microsoft.ManagedIdentity/userAssignedIdentities@2018-11-30' = if (deployResources) {
+  location: location
+  name: baseName
+}
+
+// Lets the container instance pull the test image from ACR with the user-assigned identity.
+resource acrPullContainerInstance 'Microsoft.Authorization/roleAssignments@2022-04-01' = if (deployResources) {
+  name: guid(resourceGroup().id, acrPull, 'containerInstance')
+  properties: {
+    // the ternary guards dereferencing usermgdid when nothing is deployed
+    principalId: deployResources ? usermgdid.properties.principalId : ''
+    principalType: 'ServicePrincipal'
+    roleDefinitionId: acrPull
+  }
+  scope: containerRegistry
+}
+
+resource blobRoleUserAssigned 'Microsoft.Authorization/roleAssignments@2022-04-01' = if (deployResources) {
+  scope: saUserAssigned
+  name: guid(resourceGroup().id, blobReader, usermgdid.id)
+  properties: {
+    principalId: deployResources ? usermgdid.properties.principalId : ''
+    principalType: 'ServicePrincipal'
+    roleDefinitionId: blobReader
+  }
+}
+
+// Lets the Function app's system-assigned identity read blobs in the first storage account.
+resource blobRoleFunc 'Microsoft.Authorization/roleAssignments@2022-04-01' = if (deployResources) {
+  name: guid(resourceGroup().id, blobReader, 'azfunc')
+  properties: {
+    principalId: deployResources ? azfunc.identity.principalId : ''
+    roleDefinitionId: blobReader
+    principalType: 'ServicePrincipal'
+  }
+  scope: sa
+}
+
+resource containerRegistry 'Microsoft.ContainerRegistry/registries@2023-01-01-preview' = if (deployResources) {
+  location: location
+  name: uniqueString(resourceGroup().id)
+  properties: {
+    adminUserEnabled: true
+  }
+  sku: {
+    name: 'Basic'
+  }
+}
+
+// App Service plan hosting the Function app.
+resource farm 'Microsoft.Web/serverfarms@2021-03-01' = if (deployResources) {
+  kind: 'app'
+  location: location
+  name: '${baseName}_asp'
+  properties: {}
+  sku: {
+    capacity: 1
+    family: 'B'
+    name: 'B1'
+    size: 'B1'
+    tier: 'Basic'
+  }
+}
+
+// Function app with both system- and user-assigned identities, running the custom-handler test app.
+resource azfunc 'Microsoft.Web/sites@2021-03-01' = if (deployResources) {
+  identity: {
+    type: 'SystemAssigned, UserAssigned'
+    userAssignedIdentities: {
+      '${deployResources ? usermgdid.id : ''}': {}
+    }
+  }
+  kind: 'functionapp'
+  location: location
+  name: '${baseName}func'
+  properties: {
+    enabled: true
+    httpsOnly: true
+    keyVaultReferenceIdentity: 'SystemAssigned'
+    serverFarmId: farm.id
+    siteConfig: {
+      alwaysOn: true
+      appSettings: [
+        {
+          name: 'AZIDENTITY_STORAGE_NAME'
+          value: deployResources ? sa.name : null
+        }
+        {
+          name: 'AZIDENTITY_STORAGE_NAME_USER_ASSIGNED'
+          value: deployResources ? saUserAssigned.name : null
+        }
+        {
+          name: 'AZIDENTITY_USER_ASSIGNED_IDENTITY'
+          value: deployResources ? usermgdid.id : null
+        }
+        {
+          name: 'AzureWebJobsStorage'
+          value: 'DefaultEndpointsProtocol=https;AccountName=${deployResources ? sa.name : ''};EndpointSuffix=${deployResources ? environment().suffixes.storage : ''};AccountKey=${deployResources ? sa.listKeys().keys[0].value : ''}'
+        }
+        {
+          name: 'FUNCTIONS_EXTENSION_VERSION'
+          value: '~4'
+        }
+        {
+          name: 'FUNCTIONS_WORKER_RUNTIME'
+          value: 'custom'
+        }
+        {
+          name: 'WEBSITE_CONTENTAZUREFILECONNECTIONSTRING'
+          value: 'DefaultEndpointsProtocol=https;AccountName=${deployResources ? sa.name : ''};EndpointSuffix=${deployResources ? environment().suffixes.storage : ''};AccountKey=${deployResources ? sa.listKeys().keys[0].value : ''}'
+        }
+        {
+          name: 'WEBSITE_CONTENTSHARE'
+          value: toLower('${baseName}-func')
+        }
+      ]
+      http20Enabled: true
+      minTlsVersion: '1.2'
+    }
+  }
+}
+
+// Single-node AKS cluster with the OIDC issuer and workload identity enabled for federation tests.
+resource aks 'Microsoft.ContainerService/managedClusters@2023-06-01' = if (deployResources) {
+  name: baseName
+  location: location
+  identity: {
+    type: 'SystemAssigned'
+  }
+  properties: {
+    agentPoolProfiles: [
+      {
+        count: 1
+        enableAutoScaling: false
+        kubeletDiskType: 'OS'
+        mode: 'System'
+        name: 'agentpool'
+        osDiskSizeGB: 128
+        osDiskType: 'Managed'
+        osSKU: 'Ubuntu'
+        osType: 'Linux'
+        type: 'VirtualMachineScaleSets'
+        vmSize: 'Standard_D2s_v3'
+      }
+    ]
+    dnsPrefix: 'identitytest'
+    enableRBAC: true
+    linuxProfile: {
+      adminUsername: adminUser
+      ssh: {
+        publicKeys: [
+          {
+            keyData: sshPubKey
+          }
+        ]
+      }
+    }
+    oidcIssuerProfile: {
+      enabled: true
+    }
+    securityProfile: {
+      workloadIdentity: {
+        enabled: true
+      }
+    }
+  }
+}
+
+// Outputs consumed by the post-provisioning script; empty strings when nothing was deployed.
+output AZIDENTITY_ACR_LOGIN_SERVER string = deployResources ? containerRegistry.properties.loginServer : ''
+output AZIDENTITY_ACR_NAME string = deployResources ? containerRegistry.name : ''
+output AZIDENTITY_AKS_NAME string = deployResources ? aks.name : ''
+output AZIDENTITY_FUNCTION_NAME string = deployResources ? azfunc.name : ''
+output AZIDENTITY_STORAGE_ID string = deployResources ? sa.id : ''
+output AZIDENTITY_STORAGE_NAME string = deployResources ? sa.name : ''
+output AZIDENTITY_STORAGE_NAME_USER_ASSIGNED string = deployResources ? saUserAssigned.name : ''
+output AZIDENTITY_USER_ASSIGNED_IDENTITY string = deployResources ? usermgdid.id : ''
+output AZIDENTITY_USER_ASSIGNED_IDENTITY_CLIENT_ID string = deployResources ? usermgdid.properties.clientId : ''
+output AZIDENTITY_USER_ASSIGNED_IDENTITY_NAME string = deployResources ? usermgdid.name : ''
@@ -0,0 +1,90 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+)
+
+// credNameUserPassword identifies this credential in error messages and tracing spans,
+// and selects the username/password flow in the shared public client.
+const credNameUserPassword = "UsernamePasswordCredential"
+
+// UsernamePasswordCredentialOptions contains optional parameters for UsernamePasswordCredential.
+type UsernamePasswordCredentialOptions struct {
+	azcore.ClientOptions
+
+	// AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire tokens.
+	// Add the wildcard value "*" to allow the credential to acquire tokens for any tenant in which the
+	// application is registered.
+	AdditionallyAllowedTenants []string
+
+	// authenticationRecord returned by a call to a credential's Authenticate method. Set this option
+	// to enable the credential to use data from a previous authentication.
+	// (unexported: this feature isn't part of the credential's public API yet)
+	authenticationRecord authenticationRecord
+
+	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
+	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
+	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
+	// the application responsible for ensuring the configured authority is valid and trustworthy.
+	DisableInstanceDiscovery bool
+
+	// tokenCachePersistenceOptions enables persistent token caching when not nil.
+	tokenCachePersistenceOptions *tokenCachePersistenceOptions
+}
+
+// UsernamePasswordCredential authenticates a user with a password. Microsoft doesn't recommend this kind of authentication,
+// because it's less secure than other authentication flows. This credential is not interactive, so it isn't compatible
+// with any form of multi-factor authentication, and the application must already have user or admin consent.
+// This credential can only authenticate work and school accounts; it can't authenticate Microsoft accounts.
+type UsernamePasswordCredential struct {
+	client *publicClient
+}
+
+// NewUsernamePasswordCredential creates a UsernamePasswordCredential. clientID is the ID of the application the user
+// will authenticate to. Pass nil for options to accept defaults.
+func NewUsernamePasswordCredential(tenantID string, clientID string, username string, password string, options *UsernamePasswordCredentialOptions) (*UsernamePasswordCredential, error) {
+	if options == nil {
+		options = &UsernamePasswordCredentialOptions{}
+	}
+	// map this credential's options onto the shared public client configuration
+	opts := publicClientOptions{
+		AdditionallyAllowedTenants:   options.AdditionallyAllowedTenants,
+		ClientOptions:                options.ClientOptions,
+		DisableInstanceDiscovery:     options.DisableInstanceDiscovery,
+		Password:                     password,
+		Record:                       options.authenticationRecord,
+		TokenCachePersistenceOptions: options.tokenCachePersistenceOptions,
+		Username:                     username,
+	}
+	c, err := newPublicClient(tenantID, clientID, credNameUserPassword, opts)
+	if err != nil {
+		return nil, err
+	}
+	// err is provably nil here; return the literal rather than the stale variable
+	return &UsernamePasswordCredential{client: c}, nil
+}
+
+// authenticate authenticates the user. Subsequent calls to GetToken will automatically use the returned AuthenticationRecord.
+func (c *UsernamePasswordCredential) authenticate(ctx context.Context, opts *policy.TokenRequestOptions) (authenticationRecord, error) {
+	// err is declared before the span so the deferred closure reports the final error to the span
+	var err error
+	ctx, endSpan := runtime.StartSpan(ctx, credNameUserPassword+"."+traceOpAuthenticate, c.client.azClient.Tracer(), nil)
+	defer func() { endSpan(err) }()
+	tk, err := c.client.Authenticate(ctx, opts)
+	return tk, err
+}
+
+// GetToken requests an access token from Microsoft Entra ID. This method is called automatically by Azure SDK clients.
+func (c *UsernamePasswordCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	// err is declared before the span so the deferred closure reports the final error to the span
+	var err error
+	ctx, endSpan := runtime.StartSpan(ctx, credNameUserPassword+"."+traceOpGetToken, c.client.azClient.Tracer(), nil)
+	defer func() { endSpan(err) }()
+	tk, err := c.client.GetToken(ctx, opts)
+	return tk, err
+}
+
+// compile-time check that UsernamePasswordCredential implements azcore.TokenCredential
+var _ azcore.TokenCredential = (*UsernamePasswordCredential)(nil)
@@ -0,0 +1,18 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+const (
+	// component is this module's name, appended to the SDK path below and used in telemetry strings.
+	component = "azidentity"
+
+	// module is the fully qualified name of the module used in telemetry and distributed tracing.
+	module = "github.com/Azure/azure-sdk-for-go/sdk/" + component
+
+	// version is the semantic version (see http://semver.org) of this module.
+	version = "v1.7.0"
+)
@@ -0,0 +1,131 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package azidentity
+
+import (
+ "context"
+ "errors"
+ "os"
+ "sync"
+ "time"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/policy"
+ "github.com/Azure/azure-sdk-for-go/sdk/azcore/runtime"
+)
+
+const credNameWorkloadIdentity = "WorkloadIdentityCredential"
+
+// WorkloadIdentityCredential supports Azure workload identity on Kubernetes.
+// See [Azure Kubernetes Service documentation] for more information.
+//
+// [Azure Kubernetes Service documentation]: https://learn.microsoft.com/azure/aks/workload-identity-overview
+type WorkloadIdentityCredential struct {
+	assertion, file string // cached service account token and the file it is read from
+	cred            *ClientAssertionCredential // exchanges the assertion for Entra ID access tokens
+	expires         time.Time // when the cached assertion should be re-read from file
+	mtx             *sync.RWMutex // guards assertion and expires
+}
+
+// WorkloadIdentityCredentialOptions contains optional parameters for WorkloadIdentityCredential.
+type WorkloadIdentityCredentialOptions struct {
+	azcore.ClientOptions
+
+	// AdditionallyAllowedTenants specifies additional tenants for which the credential may acquire tokens.
+	// Add the wildcard value "*" to allow the credential to acquire tokens for any tenant in which the
+	// application is registered.
+	AdditionallyAllowedTenants []string
+	// ClientID of the service principal. Defaults to the value of the environment variable AZURE_CLIENT_ID (set by the workload identity webhook).
+	ClientID string
+	// DisableInstanceDiscovery should be set true only by applications authenticating in disconnected clouds, or
+	// private clouds such as Azure Stack. It determines whether the credential requests Microsoft Entra instance metadata
+	// from https://login.microsoft.com before authenticating. Setting this to true will skip this request, making
+	// the application responsible for ensuring the configured authority is valid and trustworthy.
+	DisableInstanceDiscovery bool
+	// TenantID of the service principal. Defaults to the value of the environment variable AZURE_TENANT_ID (set by the workload identity webhook).
+	TenantID string
+	// TokenFilePath is the path of a file containing a Kubernetes service account token. Defaults to the value of the
+	// environment variable AZURE_FEDERATED_TOKEN_FILE.
+	TokenFilePath string
+}
+
+// NewWorkloadIdentityCredential constructs a WorkloadIdentityCredential. Service principal configuration is read
+// from environment variables as set by the Azure workload identity webhook. Set options to override those values.
+func NewWorkloadIdentityCredential(options *WorkloadIdentityCredentialOptions) (*WorkloadIdentityCredential, error) {
+	if options == nil {
+		options = &WorkloadIdentityCredentialOptions{}
+	}
+	ok := false // reused by the three environment-variable fallbacks below
+	clientID := options.ClientID
+	if clientID == "" {
+		if clientID, ok = os.LookupEnv(azureClientID); !ok {
+			return nil, errors.New("no client ID specified. Check pod configuration or set ClientID in the options")
+		}
+	}
+	file := options.TokenFilePath
+	if file == "" {
+		if file, ok = os.LookupEnv(azureFederatedTokenFile); !ok {
+			return nil, errors.New("no token file specified. Check pod configuration or set TokenFilePath in the options")
+		}
+	}
+	tenantID := options.TenantID
+	if tenantID == "" {
+		if tenantID, ok = os.LookupEnv(azureTenantID); !ok {
+			return nil, errors.New("no tenant ID specified. Check pod configuration or set TenantID in the options")
+		}
+	}
+	w := WorkloadIdentityCredential{file: file, mtx: &sync.RWMutex{}}
+	caco := ClientAssertionCredentialOptions{
+		AdditionallyAllowedTenants: options.AdditionallyAllowedTenants,
+		ClientOptions:              options.ClientOptions,
+		DisableInstanceDiscovery:   options.DisableInstanceDiscovery,
+	}
+	cred, err := NewClientAssertionCredential(tenantID, clientID, w.getAssertion, &caco)
+	if err != nil {
+		return nil, err
+	}
+	// we want "WorkloadIdentityCredential" in log messages, not "ClientAssertionCredential"
+	cred.client.name = credNameWorkloadIdentity
+	w.cred = cred
+	return &w, nil
+}
+
+// GetToken requests an access token from Microsoft Entra ID. Azure SDK clients call this method automatically.
+func (w *WorkloadIdentityCredential) GetToken(ctx context.Context, opts policy.TokenRequestOptions) (azcore.AccessToken, error) {
+	var err error // declared up front so the deferred endSpan observes the final error
+	ctx, endSpan := runtime.StartSpan(ctx, credNameWorkloadIdentity+"."+traceOpGetToken, w.cred.client.azClient.Tracer(), nil)
+	defer func() { endSpan(err) }()
+	tk, err := w.cred.GetToken(ctx, opts)
+	return tk, err
+}
+
+// getAssertion returns the specified file's content, which is expected to be a Kubernetes service account token.
+// Kubernetes is responsible for updating the file as service account tokens expire.
+func (w *WorkloadIdentityCredential) getAssertion(context.Context) (string, error) {
+	w.mtx.RLock()
+	if w.expires.Before(time.Now()) {
+		// ensure only one goroutine at a time updates the assertion
+		w.mtx.RUnlock() // lock upgrade isn't atomic; the double check below handles the race window
+		w.mtx.Lock()
+		defer w.mtx.Unlock()
+		// double check because another goroutine may have acquired the write lock first and done the update
+		if now := time.Now(); w.expires.Before(now) {
+			content, err := os.ReadFile(w.file)
+			if err != nil {
+				return "", err // expires stays in the past, so the next call retries the read
+			}
+			w.assertion = string(content)
+			// Kubernetes rotates service account tokens when they reach 80% of their total TTL. The shortest TTL
+			// is 1 hour. That implies the token we just read is valid for at least 12 minutes (20% of 1 hour),
+			// but we add some margin for safety.
+			w.expires = now.Add(10 * time.Minute)
+		}
+	} else {
+		defer w.mtx.RUnlock()
+	}
+	return w.assertion, nil
+}
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) Microsoft Corporation.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE
@@ -0,0 +1,51 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package diag
+
+import (
+ "fmt"
+ "runtime"
+ "strings"
+)
+
+// Caller returns the file and line number of a frame on the caller's stack.
+// If the function fails an empty string is returned.
+// skipFrames - the number of frames to skip when determining the caller.
+// Passing a value of 0 will return the immediate caller of this function.
+func Caller(skipFrames int) string {
+	if pc, file, line, ok := runtime.Caller(skipFrames + 1); ok {
+		// the skipFrames + 1 is to skip ourselves
+		frame := runtime.FuncForPC(pc)
+		return fmt.Sprintf("%s()\n\t%s:%d", frame.Name(), file, line)
+	}
+	return ""
+}
+
+// StackTrace returns a formatted stack trace string.
+// If the function fails an empty string is returned.
+// skipFrames - the number of stack frames to skip before composing the trace string.
+// totalFrames - the maximum number of stack frames to include in the trace string.
+func StackTrace(skipFrames, totalFrames int) string {
+	pcCallers := make([]uintptr, totalFrames)
+	if frames := runtime.Callers(skipFrames, pcCallers); frames == 0 { // NOTE: for runtime.Callers, skip 0 identifies Callers itself (semantics differ from runtime.Caller above)
+		return ""
+	}
+	frames := runtime.CallersFrames(pcCallers)
+	sb := strings.Builder{}
+	for {
+		frame, more := frames.Next()
+		sb.WriteString(frame.Function)
+		sb.WriteString("()\n\t")
+		sb.WriteString(frame.File)
+		sb.WriteRune(':')
+		sb.WriteString(fmt.Sprintf("%d\n", frame.Line))
+		if !more {
+			break
+		}
+	}
+	return sb.String()
+}
@@ -0,0 +1,7 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package diag
@@ -0,0 +1,7 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package errorinfo
@@ -0,0 +1,46 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package errorinfo
+
+// NonRetriable represents a non-transient error. This works in
+// conjunction with the retry policy, indicating that the error condition
+// will not resolve with further attempts, so no retries will be made.
+// Use errors.As() to access this interface in the error chain.
+type NonRetriable interface {
+	error
+	NonRetriable()
+}
+
+// NonRetriableError marks the specified error as non-retriable.
+// It wraps err in a type that implements the NonRetriable marker interface.
+func NonRetriableError(err error) error {
+	return &nonRetriableError{err}
+}
+
+// nonRetriableError is a struct that embeds the error interface.
+// It is used to represent errors that should not be retried.
+type nonRetriableError struct {
+	error
+}
+
+// Error implements the error interface.
+// It returns the error message of the embedded error.
+func (p *nonRetriableError) Error() string {
+	return p.error.Error()
+}
+
+// NonRetriable implements the NonRetriable marker interface.
+// Non-functional and indicates that the error is non-retriable.
+func (*nonRetriableError) NonRetriable() {
+	// marker method
+}
+
+// Unwrap supports errors.Is/errors.As traversal.
+// It returns the original error that was marked as non-retriable.
+func (p *nonRetriableError) Unwrap() error {
+	return p.error
+}
@@ -0,0 +1,129 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package exported
+
+import (
+ "errors"
+ "io"
+ "net/http"
+)
+
+// HasStatusCode returns true if the Response's status code is one of the specified values.
+// Exported as runtime.HasStatusCode().
+func HasStatusCode(resp *http.Response, statusCodes ...int) bool {
+	if resp == nil { // tolerate a nil response (e.g. transport failure) rather than panicking
+		return false
+	}
+	for _, sc := range statusCodes {
+		if resp.StatusCode == sc {
+			return true
+		}
+	}
+	return false
+}
+
+// PayloadOptions contains the optional values for the Payload func.
+// NOT exported but used by azcore.
+type PayloadOptions struct {
+	// BytesModifier receives the downloaded byte slice and returns an updated byte slice.
+	// Use this to modify the downloaded bytes in a payload (e.g. removing a BOM); it runs once, before caching.
+	BytesModifier func([]byte) []byte
+}
+
+// Payload reads and returns the response body or an error.
+// On a successful read, the response body is cached.
+// Subsequent reads will access the cached value.
+// Exported as runtime.Payload() WITHOUT the opts parameter.
+func Payload(resp *http.Response, opts *PayloadOptions) ([]byte, error) {
+	if resp.Body == nil {
+		// this shouldn't happen in real-world scenarios as a
+		// response with no body should set it to http.NoBody
+		return nil, nil
+	}
+	modifyBytes := func(b []byte) []byte { return b }
+	if opts != nil && opts.BytesModifier != nil {
+		modifyBytes = opts.BytesModifier
+	}
+
+	// resp.Body is a *nopClosingBytesReader only when the payload was previously downloaded and cached
+	if buf, ok := resp.Body.(*nopClosingBytesReader); ok {
+		bytesBody := modifyBytes(buf.Bytes())
+		buf.Set(bytesBody)
+		return bytesBody, nil
+	}
+
+	bytesBody, err := io.ReadAll(resp.Body)
+	resp.Body.Close()
+	if err != nil {
+		return nil, err
+	}
+
+	bytesBody = modifyBytes(bytesBody)
+	resp.Body = &nopClosingBytesReader{s: bytesBody} // cache so later calls (and PayloadDownloaded) see the body
+	return bytesBody, nil
+}
+
+// PayloadDownloaded returns true if the response body has already been downloaded.
+// This implies that the Payload() func above has been previously called.
+// NOT exported but used by azcore.
+func PayloadDownloaded(resp *http.Response) bool {
+	_, ok := resp.Body.(*nopClosingBytesReader) // Payload() swaps in this type after caching the body
+	return ok
+}
+
+// nopClosingBytesReader is an io.ReadSeekCloser around a byte slice.
+// It also provides direct access to the byte slice to avoid rereading.
+type nopClosingBytesReader struct {
+	s []byte // the cached payload
+	i int64  // current read offset into s
+}
+
+// Bytes returns the underlying byte slice.
+func (r *nopClosingBytesReader) Bytes() []byte {
+	return r.s
+}
+
+// Close implements the io.Closer interface; it is a no-op so the cached body survives Close.
+func (*nopClosingBytesReader) Close() error {
+	return nil
+}
+
+// Read implements the io.Reader interface.
+func (r *nopClosingBytesReader) Read(b []byte) (n int, err error) {
+	if r.i >= int64(len(r.s)) {
+		return 0, io.EOF
+	}
+	n = copy(b, r.s[r.i:])
+	r.i += int64(n)
+	return
+}
+
+// Set replaces the existing byte slice with the specified byte slice and resets the reader.
+func (r *nopClosingBytesReader) Set(b []byte) {
+	r.s = b
+	r.i = 0
+}
+
+// Seek implements the io.Seeker interface.
+func (r *nopClosingBytesReader) Seek(offset int64, whence int) (int64, error) {
+	var i int64
+	switch whence {
+	case io.SeekStart:
+		i = offset
+	case io.SeekCurrent:
+		i = r.i + offset
+	case io.SeekEnd:
+		i = int64(len(r.s)) + offset
+	default:
+		return 0, errors.New("nopClosingBytesReader: invalid whence")
+	}
+	if i < 0 { // seeking past the end is allowed, matching io.Seeker semantics
+		return 0, errors.New("nopClosingBytesReader: negative position")
+	}
+	r.i = i
+	return i, nil
+}
@@ -0,0 +1,7 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package log
@@ -0,0 +1,104 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package log
+
+import (
+ "fmt"
+ "os"
+ "time"
+)
+
+///////////////////////////////////////////////////////////////////////////////////////////////////
+// NOTE: The following are exported as public surface area from azcore. DO NOT MODIFY
+///////////////////////////////////////////////////////////////////////////////////////////////////
+
+// Event is used to group entries. Each group can be toggled on or off.
+type Event string
+
+// SetEvents is used to control which events are written to
+// the log. By default all log events are written.
+func SetEvents(cls ...Event) {
+	log.cls = cls
+}
+
+// SetListener will set the Logger to write to the specified listener.
+func SetListener(lst func(Event, string)) {
+	log.lst = lst
+}
+
+///////////////////////////////////////////////////////////////////////////////////////////////////
+// END PUBLIC SURFACE AREA
+///////////////////////////////////////////////////////////////////////////////////////////////////
+
+// Should returns true if the specified log event should be written to the log.
+// By default all log events will be logged. Call SetEvents() to limit
+// the log events for logging.
+// If no listener has been set this will return false.
+// Calling this method is useful when the message to log is computationally expensive
+// and you want to avoid the overhead if its log event is not enabled.
+func Should(cls Event) bool {
+	if log.lst == nil {
+		return false
+	}
+	if log.cls == nil || len(log.cls) == 0 { // nil check is redundant (len(nil) == 0) but harmless
+		return true
+	}
+	for _, c := range log.cls {
+		if c == cls {
+			return true
+		}
+	}
+	return false
+}
+
+// Write invokes the underlying listener with the specified event and message.
+// If the event shouldn't be logged or there is no listener then Write does nothing.
+func Write(cls Event, message string) {
+	if !Should(cls) {
+		return
+	}
+	log.lst(cls, message)
+}
+
+// Writef invokes the underlying listener with the specified event and formatted message.
+// If the event shouldn't be logged or there is no listener then Writef does nothing.
+func Writef(cls Event, format string, a ...interface{}) {
+	if !Should(cls) { // checked first so fmt.Sprintf isn't paid for suppressed events
+		return
+	}
+	log.lst(cls, fmt.Sprintf(format, a...))
+}
+
+// TestResetEvents is used for TESTING PURPOSES ONLY; it re-enables all events.
+func TestResetEvents() {
+	log.cls = nil
+}
+
+// logger controls which events to log and writing to the underlying log.
+type logger struct {
+	cls []Event             // events to log; nil/empty means all
+	lst func(Event, string) // destination; nil disables logging entirely
+}
+
+// the process-wide logger
+var log logger
+
+func init() {
+	initLogging()
+}
+
+// split out for testing purposes
+func initLogging() {
+	if cls := os.Getenv("AZURE_SDK_GO_LOGGING"); cls == "all" {
+		// cls could be enhanced to support a comma-delimited list of log events
+		log.lst = func(cls Event, msg string) {
+			// simple console logger, it writes to stderr in the following format:
+			// [time-stamp] Event: message
+			fmt.Fprintf(os.Stderr, "[%s] %s: %s\n", time.Now().Format(time.StampMicro), cls, msg)
+		}
+	}
+}
@@ -0,0 +1,155 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package poller
+
+import (
+ "encoding/json"
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ "github.com/Azure/azure-sdk-for-go/sdk/internal/exported"
+)
+
+// the well-known set of LRO status/provisioning state values.
+const (
+	StatusSucceeded  = "Succeeded"
+	StatusCanceled   = "Canceled"
+	StatusFailed     = "Failed"
+	StatusInProgress = "InProgress"
+)
+
+// these are non-conformant states that we've seen in the wild.
+// we support them for back-compat (compared case-insensitively below).
+const (
+	StatusCancelled = "Cancelled"
+	StatusCompleted = "Completed"
+)
+
+// IsTerminalState returns true if the LRO's state is terminal.
+func IsTerminalState(s string) bool {
+	return Failed(s) || Succeeded(s)
+}
+
+// Failed returns true if the LRO's state is terminal failure.
+func Failed(s string) bool {
+	return strings.EqualFold(s, StatusFailed) || strings.EqualFold(s, StatusCanceled) || strings.EqualFold(s, StatusCancelled)
+}
+
+// Succeeded returns true if the LRO's state is terminal success.
+func Succeeded(s string) bool {
+	return strings.EqualFold(s, StatusSucceeded) || strings.EqualFold(s, StatusCompleted)
+}
+
+// StatusCodeValid returns true if the LRO response contains a valid HTTP status code.
+func StatusCodeValid(resp *http.Response) bool {
+	return exported.HasStatusCode(resp, http.StatusOK, http.StatusAccepted, http.StatusCreated, http.StatusNoContent)
+}
+
+// IsValidURL verifies that the URL is valid and absolute.
+func IsValidURL(s string) bool {
+	u, err := url.Parse(s)
+	return err == nil && u.IsAbs()
+}
+
+// ErrNoBody is returned if the response didn't contain a body.
+var ErrNoBody = errors.New("the response did not contain a body")
+
+// GetJSON reads the response body into a raw JSON object.
+// It returns ErrNoBody if there was no content.
+func GetJSON(resp *http.Response) (map[string]any, error) {
+	body, err := exported.Payload(resp, nil)
+	if err != nil {
+		return nil, err
+	}
+	if len(body) == 0 {
+		return nil, ErrNoBody
+	}
+	// unmarshal the body to get the value
+	var jsonBody map[string]any
+	if err = json.Unmarshal(body, &jsonBody); err != nil {
+		return nil, err
+	}
+	return jsonBody, nil
+}
+
+// provisioningState returns properties.provisioningState from the response body or the empty string.
+func provisioningState(jsonBody map[string]any) string {
+	jsonProps, ok := jsonBody["properties"]
+	if !ok {
+		return ""
+	}
+	props, ok := jsonProps.(map[string]any)
+	if !ok {
+		return ""
+	}
+	rawPs, ok := props["provisioningState"]
+	if !ok {
+		return ""
+	}
+	ps, ok := rawPs.(string)
+	if !ok {
+		return ""
+	}
+	return ps
+}
+
+// status returns the top-level "status" field from the response body or the empty string.
+func status(jsonBody map[string]any) string {
+	rawStatus, ok := jsonBody["status"]
+	if !ok {
+		return ""
+	}
+	status, ok := rawStatus.(string)
+	if !ok {
+		return ""
+	}
+	return status
+}
+
+// GetStatus returns the LRO's status from the response body.
+// Typically used for Azure-AsyncOperation flows.
+// If there is no status in the response body the empty string is returned.
+func GetStatus(resp *http.Response) (string, error) {
+	jsonBody, err := GetJSON(resp)
+	if err != nil {
+		return "", err
+	}
+	return status(jsonBody), nil
+}
+
+// GetProvisioningState returns the LRO's state from the response body.
+// If there is no state in the response body the empty string is returned.
+func GetProvisioningState(resp *http.Response) (string, error) {
+	jsonBody, err := GetJSON(resp)
+	if err != nil {
+		return "", err
+	}
+	return provisioningState(jsonBody), nil
+}
+
+// GetResourceLocation returns the LRO's resourceLocation value from the response body.
+// Typically used for Operation-Location flows.
+// If there is no resourceLocation in the response body the empty string is returned.
+func GetResourceLocation(resp *http.Response) (string, error) {
+	jsonBody, err := GetJSON(resp)
+	if err != nil {
+		return "", err
+	}
+	v, ok := jsonBody["resourceLocation"]
+	if !ok {
+		// it might be ok if the field doesn't exist, the caller must make that determination
+		return "", nil
+	}
+	vv, ok := v.(string)
+	if !ok {
+		return "", fmt.Errorf("the resourceLocation value %v was not in string format", v)
+	}
+	return vv, nil
+}
@@ -0,0 +1,123 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package temporal
+
+import (
+ "sync"
+ "time"
+)
+
+// AcquireResource abstracts a method for refreshing a temporal resource.
+type AcquireResource[TResource, TState any] func(state TState) (newResource TResource, newExpiration time.Time, err error)
+
+// Resource is a temporal resource (usually a credential) that requires periodic refreshing.
+type Resource[TResource, TState any] struct {
+	// cond is used to synchronize access to the shared resource embodied by the remaining fields
+	cond *sync.Cond
+
+	// acquiring indicates that some thread/goroutine is in the process of acquiring/updating the resource
+	acquiring bool
+
+	// resource contains the value of the shared resource
+	resource TResource
+
+	// expiration indicates when the shared resource expires; it is the zero time if the resource was never acquired
+	expiration time.Time
+
+	// lastAttempt indicates when a thread/goroutine last attempted to acquire/update the resource
+	lastAttempt time.Time
+
+	// acquireResource is the callback function that actually acquires the resource
+	acquireResource AcquireResource[TResource, TState]
+}
+
+// NewResource creates a new Resource that uses the specified AcquireResource for refreshing.
+func NewResource[TResource, TState any](ar AcquireResource[TResource, TState]) *Resource[TResource, TState] {
+	return &Resource[TResource, TState]{cond: sync.NewCond(&sync.Mutex{}), acquireResource: ar}
+}
+
+// Get returns the underlying resource.
+// If the resource is fresh, no refresh is performed.
+func (er *Resource[TResource, TState]) Get(state TState) (TResource, error) {
+	// If the resource is expiring within this time window, update it eagerly.
+	// This allows other threads/goroutines to keep running by using the not-yet-expired
+	// resource value while one thread/goroutine updates the resource.
+	const window = 5 * time.Minute  // This example updates the resource 5 minutes prior to expiration
+	const backoff = 30 * time.Second // Minimum wait time between eager update attempts
+
+	now, acquire, expired := time.Now(), false, false
+
+	// acquire exclusive lock
+	er.cond.L.Lock()
+	resource := er.resource
+
+	for {
+		expired = er.expiration.IsZero() || er.expiration.Before(now)
+		if expired {
+			// The resource was never acquired or has expired
+			if !er.acquiring {
+				// If another thread/goroutine is not acquiring/updating the resource, this thread/goroutine will do it
+				er.acquiring, acquire = true, true
+				break
+			}
+			// Getting here means that this thread/goroutine will wait for the updated resource
+		} else if er.expiration.Add(-window).Before(now) {
+			// The resource is valid but is expiring within the time window
+			if !er.acquiring && er.lastAttempt.Add(backoff).Before(now) {
+				// If another thread/goroutine is not acquiring/renewing the resource, and none has attempted
+				// to do so within the last 30 seconds, this thread/goroutine will do it
+				er.acquiring, acquire = true, true
+				break
+			}
+			// This thread/goroutine will use the existing resource value while another updates it
+			resource = er.resource
+			break
+		} else {
+			// The resource is not close to expiring, this thread/goroutine should use its current value
+			resource = er.resource
+			break
+		}
+		// If we get here, wait for the new resource value to be acquired/updated
+		er.cond.Wait()
+	}
+	er.cond.L.Unlock() // Release the lock so no threads/goroutines are blocked
+
+	var err error
+	if acquire {
+		// This thread/goroutine has been selected to acquire/update the resource
+		var expiration time.Time
+		var newValue TResource
+		er.lastAttempt = now // NOTE(review): written outside the lock while readers check it under the lock — presumably tolerated upstream; confirm
+		newValue, expiration, err = er.acquireResource(state)
+
+		// Atomically, update the shared resource's new value & expiration.
+		er.cond.L.Lock()
+		if err == nil {
+			// Update resource & expiration, return the new value
+			resource = newValue
+			er.resource, er.expiration = resource, expiration
+		} else if !expired {
+			// An eager update failed. Discard the error and return the current--still valid--resource value
+			err = nil
+		}
+		er.acquiring = false // Indicate that no thread/goroutine is currently acquiring the resource
+
+		// Wake up any waiting threads/goroutines since there is a resource they can ALL use
+		er.cond.L.Unlock()
+		er.cond.Broadcast() // calling Broadcast without holding the lock is permitted by sync.Cond
+	}
+	return resource, err // Return the resource this thread/goroutine can use
+}
+
+// Expire marks the resource as expired, ensuring it's refreshed on the next call to Get().
+func (er *Resource[TResource, TState]) Expire() {
+	er.cond.L.Lock()
+	defer er.cond.L.Unlock()
+
+	// Reset the expiration as if we never got this resource to begin with (the zero time)
+	er.expiration = time.Time{}
+}
@@ -0,0 +1,7 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package uuid
@@ -0,0 +1,76 @@
+//go:build go1.18
+// +build go1.18
+
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+package uuid
+
+import (
+ "crypto/rand"
+ "errors"
+ "fmt"
+ "strconv"
+)
+
+// The UUID reserved variants.
+const (
+	reservedRFC4122 byte = 0x40 // ORed into byte 8 to mark the variant
+)
+
+// A UUID representation compliant with specification in RFC4122 document.
+type UUID [16]byte
+
+// New returns a new UUID using the RFC4122 algorithm.
+func New() (UUID, error) {
+	u := UUID{}
+	// Set all bits to pseudo-random values.
+	// NOTE: this takes a process-wide lock
+	_, err := rand.Read(u[:])
+	if err != nil {
+		return u, err
+	}
+	u[8] = (u[8] | reservedRFC4122) & 0x7F // u.setVariant(ReservedRFC4122); NOTE(review): & 0x7F clears the top bit, differing from RFC 4122's 10x variant pattern — presumably intentional upstream, confirm
+
+	var version byte = 4
+	u[6] = (u[6] & 0xF) | (version << 4) // u.setVersion(4)
+	return u, nil
+}
+
+// String returns the UUID in "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" (lowercase hex) format.
+func (u UUID) String() string {
+	return fmt.Sprintf("%x-%x-%x-%x-%x", u[0:4], u[4:6], u[6:8], u[8:10], u[10:])
+}
+
+// Parse parses a string formatted as "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx"
+// or "{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}" into a UUID.
+func Parse(s string) (UUID, error) {
+	var uuid UUID
+	// ensure format
+	switch len(s) {
+	case 36:
+		// xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx
+	case 38:
+		// {xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}
+		s = s[1:37]
+	default:
+		return uuid, errors.New("invalid UUID format")
+	}
+	if s[8] != '-' || s[13] != '-' || s[18] != '-' || s[23] != '-' {
+		return uuid, errors.New("invalid UUID format")
+	}
+	// parse chunks; each x is the start offset of one two-hex-digit byte
+	for i, x := range [16]int{
+		0, 2, 4, 6,
+		9, 11,
+		14, 16,
+		19, 21,
+		24, 26, 28, 30, 32, 34} {
+		b, err := strconv.ParseUint(s[x:x+2], 16, 8)
+		if err != nil {
+			return uuid, fmt.Errorf("invalid UUID format: %s", err)
+		}
+		uuid[i] = byte(b)
+	}
+	return uuid, nil
+}
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) Microsoft Corporation.
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE
@@ -0,0 +1,54 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+/*
+Package cache allows third parties to implement external storage for caching token data
+for distributed systems or multiple local applications access.
+
+The data stored and extracted will represent the entire cache. Therefore it is recommended
+one msal instance per user. This data is considered opaque and there are no guarantees to
+implementers on the format being passed.
+*/
+package cache
+
+import "context"
+
+// Marshaler marshals data from an internal cache to bytes that can be stored.
+type Marshaler interface {
+	Marshal() ([]byte, error)
+}
+
+// Unmarshaler unmarshals data from a storage medium into the internal cache, overwriting it.
+type Unmarshaler interface {
+	Unmarshal([]byte) error
+}
+
+// Serializer can serialize the cache to binary or from binary into the cache.
+type Serializer interface {
+	Marshaler
+	Unmarshaler
+}
+
+// ExportHints are suggestions for storing data.
+type ExportHints struct {
+	// PartitionKey is a suggested key for partitioning the cache
+	PartitionKey string
+}
+
+// ReplaceHints are suggestions for loading data.
+type ReplaceHints struct {
+	// PartitionKey is a suggested key for partitioning the cache
+	PartitionKey string
+}
+
+// ExportReplace exports and replaces in-memory cache data. It doesn't support nil Context or
+// define the outcome of passing one. A Context without a timeout must receive a default timeout
+// specified by the implementor. Retries are the implementation's responsibility.
+type ExportReplace interface {
+	// Replace replaces the cache with what is in external storage. Implementors should honor
+	// Context cancellations and return context.Canceled or context.DeadlineExceeded in those cases.
+	Replace(ctx context.Context, cache Unmarshaler, hints ReplaceHints) error
+	// Export writes the binary representation of the cache (cache.Marshal()) to external storage.
+	// This is considered opaque. Context cancellations should be honored as in Replace.
+	Export(ctx context.Context, cache Marshaler, hints ExportHints) error
+}
@@ -0,0 +1,719 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+/*
+Package confidential provides a client for authentication of "confidential" applications.
+A "confidential" application is defined as an app that runs on servers. Such apps are considered
+difficult to access and for that reason capable of keeping an application secret.
+Confidential clients can hold configuration-time secrets.
+*/
+package confidential
+
+import (
+ "context"
+ "crypto"
+ "crypto/rsa"
+ "crypto/x509"
+ "encoding/base64"
+ "encoding/pem"
+ "errors"
+ "fmt"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/cache"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/exported"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/options"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/shared"
+)
+
+/*
+Design note:
+
+confidential.Client uses base.Client as an embedded type. base.Client statically assigns its attributes
+during creation. As it doesn't have any pointers in it, anything borrowed from it, such as
+Base.AuthParams is a copy that is free to be manipulated here.
+
+Duplicate Calls shared between public.Client and this package:
+There are some duplicate call options provided here that are the same as in public.Client. This
+is a design choice. Go proverb (https://www.youtube.com/watch?v=PAAkCSZUG1c&t=9m28s):
+"a little copying is better than a little dependency". Yes, we could have another package with
+shared options (fail). That divides like 2 options from all others which makes the user look
+through more docs. We can have all clients in one package, but I think separate packages
+here makes for better naming (public.Client vs client.PublicClient). So I chose a little
+duplication.
+
+.Net People, Take note on X509:
+This uses x509.Certificates and private keys. x509 does not store private keys. .Net
+has an x509.Certificate2 abstraction that has private keys, but that is just a strange invention.
+As such I've put a PEM decoder into here.
+*/
+
+// TODO(msal): This should have example code for each method on client using Go's example doc framework.
+// Base usage details should be included in the package documentation.
+
+// AuthResult contains the results of one token acquisition operation.
+// For details see https://aka.ms/msal-net-authenticationresult
+type AuthResult = base.AuthResult
+
+type AuthenticationScheme = authority.AuthenticationScheme
+
+type Account = shared.Account
+
+// CertFromPEM converts a PEM file (.pem or .key) for use with [NewCredFromCert]. The file
+// must contain the public certificate and the private key. If a PEM block is encrypted and
+// password is not an empty string, it attempts to decrypt the PEM blocks using the password.
+// Multiple certs are due to certificate chaining for use cases like TLS that sign from root to leaf.
+//
+// The input must contain at least one "CERTIFICATE" block and exactly one private key
+// block ("PRIVATE KEY" in PKCS#8 form, or "RSA PRIVATE KEY" in PKCS#1 form); zero
+// certificates, zero keys, or multiple keys are errors.
+func CertFromPEM(pemData []byte, password string) ([]*x509.Certificate, crypto.PrivateKey, error) {
+	var certs []*x509.Certificate
+	var priv crypto.PrivateKey
+	// Walk the input block by block; pem.Decode returns nil when no blocks remain.
+	for {
+		block, rest := pem.Decode(pemData)
+		if block == nil {
+			break
+		}
+
+		//nolint:staticcheck // x509.IsEncryptedPEMBlock and x509.DecryptPEMBlock are deprecated. They are used here only to support a usecase.
+		if x509.IsEncryptedPEMBlock(block) {
+			b, err := x509.DecryptPEMBlock(block, []byte(password))
+			if err != nil {
+				return nil, nil, fmt.Errorf("could not decrypt encrypted PEM block: %v", err)
+			}
+			// Re-decode the decrypted bytes to obtain the cleartext block.
+			block, _ = pem.Decode(b)
+			if block == nil {
+				return nil, nil, fmt.Errorf("encounter encrypted PEM block that did not decode")
+			}
+		}
+
+		switch block.Type {
+		case "CERTIFICATE":
+			cert, err := x509.ParseCertificate(block.Bytes)
+			if err != nil {
+				return nil, nil, fmt.Errorf("block labelled 'CERTIFICATE' could not be parsed by x509: %v", err)
+			}
+			certs = append(certs, cert)
+		case "PRIVATE KEY":
+			// PKCS#8 encoded key; only one private key block is allowed in the input.
+			if priv != nil {
+				return nil, nil, errors.New("found multiple private key blocks")
+			}
+
+			var err error
+			priv, err = x509.ParsePKCS8PrivateKey(block.Bytes)
+			if err != nil {
+				return nil, nil, fmt.Errorf("could not decode private key: %v", err)
+			}
+		case "RSA PRIVATE KEY":
+			// PKCS#1 encoded RSA key; the same single-key restriction applies.
+			if priv != nil {
+				return nil, nil, errors.New("found multiple private key blocks")
+			}
+			var err error
+			priv, err = x509.ParsePKCS1PrivateKey(block.Bytes)
+			if err != nil {
+				return nil, nil, fmt.Errorf("could not decode private key: %v", err)
+			}
+		}
+		// Advance past the block just consumed.
+		pemData = rest
+	}
+
+	if len(certs) == 0 {
+		return nil, nil, fmt.Errorf("no certificates found")
+	}
+
+	if priv == nil {
+		return nil, nil, fmt.Errorf("no private key found")
+	}
+
+	return certs, priv, nil
+}
+
+// AssertionRequestOptions has required information for client assertion claims
+type AssertionRequestOptions = exported.AssertionRequestOptions
+
+// Credential represents the credential used in confidential client flows.
+// Exactly one credential kind is populated, depending on which NewCredFrom*
+// constructor produced the value; toInternal() enforces this.
+type Credential struct {
+	// secret holds a client secret (see NewCredFromSecret).
+	secret string
+
+	// cert, key and x5c hold a signing certificate, its private key, and the
+	// base64-DER chain for the x5c header (see NewCredFromCert).
+	cert *x509.Certificate
+	key crypto.PrivateKey
+	x5c []string
+
+	// assertionCallback supplies signed client assertions on demand (see NewCredFromAssertionCallback).
+	assertionCallback func(context.Context, AssertionRequestOptions) (string, error)
+
+	// tokenProvider supplies complete access tokens directly (see NewCredFromTokenProvider).
+	tokenProvider func(context.Context, TokenProviderParameters) (TokenProviderResult, error)
+}
+
+// toInternal returns the accesstokens.Credential that is used internally. The current structure of the
+// code requires that client.go, requests.go and confidential.go share a credential type without
+// having import recursion. That requires the type used between is in a shared package. Therefore
+// we have this.
+func (c Credential) toInternal() (*accesstokens.Credential, error) {
+	// The first populated credential kind wins, in the order checked below.
+	if c.secret != "" {
+		return &accesstokens.Credential{Secret: c.secret}, nil
+	}
+	if c.cert != nil {
+		if c.key == nil {
+			return nil, errors.New("missing private key for certificate")
+		}
+		return &accesstokens.Credential{Cert: c.cert, Key: c.key, X5c: c.x5c}, nil
+	}
+	if c.key != nil {
+		// A private key without a matching certificate is unusable.
+		return nil, errors.New("missing certificate for private key")
+	}
+	if c.assertionCallback != nil {
+		return &accesstokens.Credential{AssertionCallback: c.assertionCallback}, nil
+	}
+	if c.tokenProvider != nil {
+		return &accesstokens.Credential{TokenProvider: c.tokenProvider}, nil
+	}
+	// Zero-value Credential: none of the NewCredFrom* constructors were used.
+	return nil, errors.New("invalid credential")
+}
+
+// NewCredFromSecret creates a Credential from a secret.
+func NewCredFromSecret(secret string) (Credential, error) {
+ if secret == "" {
+ return Credential{}, errors.New("secret can't be empty string")
+ }
+ return Credential{secret: secret}, nil
+}
+
+// NewCredFromAssertionCallback creates a Credential that invokes a callback to get assertions
+// authenticating the application. The callback must be thread safe.
+func NewCredFromAssertionCallback(callback func(context.Context, AssertionRequestOptions) (string, error)) Credential {
+ return Credential{assertionCallback: callback}
+}
+
+// NewCredFromCert creates a Credential from a certificate or chain of certificates and an RSA private key
+// as returned by [CertFromPEM].
+//
+// The certificate whose RSA public key matches key becomes the signing certificate and is
+// placed first in the x5c chain; if no certificate matches the key, an error is returned.
+func NewCredFromCert(certs []*x509.Certificate, key crypto.PrivateKey) (Credential, error) {
+	cred := Credential{key: key}
+	k, ok := key.(*rsa.PrivateKey)
+	if !ok {
+		return cred, errors.New("key must be an RSA key")
+	}
+	for _, cert := range certs {
+		if cert == nil {
+			// not returning an error here because certs may still contain a sufficient cert/key pair
+			continue
+		}
+		certKey, ok := cert.PublicKey.(*rsa.PublicKey)
+		if ok && k.E == certKey.E && k.N.Cmp(certKey.N) == 0 {
+			// We know this is the signing cert because its public key matches the given private key.
+			// This cert must be first in x5c.
+			cred.cert = cert
+			cred.x5c = append([]string{base64.StdEncoding.EncodeToString(cert.Raw)}, cred.x5c...)
+		} else {
+			// Non-signing chain certificates go after the signing cert in x5c.
+			cred.x5c = append(cred.x5c, base64.StdEncoding.EncodeToString(cert.Raw))
+		}
+	}
+	if cred.cert == nil {
+		return cred, errors.New("key doesn't match any certificate")
+	}
+	return cred, nil
+}
+
+// TokenProviderParameters is the authentication parameters passed to token providers
+type TokenProviderParameters = exported.TokenProviderParameters
+
+// TokenProviderResult is the authentication result returned by custom token providers
+type TokenProviderResult = exported.TokenProviderResult
+
+// NewCredFromTokenProvider creates a Credential from a function that provides access tokens. The function
+// must be concurrency safe. This is intended only to allow the Azure SDK to cache MSI tokens. It isn't
+// useful to applications in general because the token provider must implement all authentication logic.
+func NewCredFromTokenProvider(provider func(context.Context, TokenProviderParameters) (TokenProviderResult, error)) Credential {
+ return Credential{tokenProvider: provider}
+}
+
+// AutoDetectRegion instructs MSAL Go to auto detect region for Azure regional token service.
+func AutoDetectRegion() string {
+ return "TryAutoDetect"
+}
+
+// Client is a representation of authentication client for confidential applications as defined in the
+// package doc. A new Client should be created PER SERVICE USER.
+// For more information, visit https://docs.microsoft.com/azure/active-directory/develop/msal-client-applications
+type Client struct {
+ base base.Client
+ cred *accesstokens.Credential
+}
+
+// clientOptions are optional settings for New(). These options are set using various functions
+// returning Option calls.
+type clientOptions struct {
+ accessor cache.ExportReplace
+ authority, azureRegion string
+ capabilities []string
+ disableInstanceDiscovery, sendX5C bool
+ httpClient ops.HTTPClient
+}
+
+// Option is an optional argument to New().
+type Option func(o *clientOptions)
+
+// WithCache provides an accessor that will read and write authentication data to an externally managed cache.
+func WithCache(accessor cache.ExportReplace) Option {
+ return func(o *clientOptions) {
+ o.accessor = accessor
+ }
+}
+
+// WithClientCapabilities allows configuring one or more client capabilities such as "CP1"
+func WithClientCapabilities(capabilities []string) Option {
+ return func(o *clientOptions) {
+ // there's no danger of sharing the slice's underlying memory with the application because
+ // this slice is simply passed to base.WithClientCapabilities, which copies its data
+ o.capabilities = capabilities
+ }
+}
+
+// WithHTTPClient allows for a custom HTTP client to be set.
+func WithHTTPClient(httpClient ops.HTTPClient) Option {
+ return func(o *clientOptions) {
+ o.httpClient = httpClient
+ }
+}
+
+// WithX5C specifies if x5c claim(public key of the certificate) should be sent to STS to enable Subject Name Issuer Authentication.
+func WithX5C() Option {
+ return func(o *clientOptions) {
+ o.sendX5C = true
+ }
+}
+
+// WithInstanceDiscovery set to false to disable authority validation (to support private cloud scenarios)
+func WithInstanceDiscovery(enabled bool) Option {
+ return func(o *clientOptions) {
+ o.disableInstanceDiscovery = !enabled
+ }
+}
+
+// WithAzureRegion sets the region(preferred) or Confidential.AutoDetectRegion() for auto detecting region.
+// Region names as per https://azure.microsoft.com/en-ca/global-infrastructure/geographies/.
+// See https://aka.ms/region-map for more details on region names.
+// The region value should be short region name for the region where the service is deployed.
+// For example "centralus" is short name for region Central US.
+// Not all auth flows can use the regional token service.
+// Service To Service (client credential flow) tokens can be obtained from the regional service.
+// Requires configuration at the tenant level.
+// Auto-detection works on a limited number of Azure artifacts (VMs, Azure functions).
+// If auto-detection fails, the non-regional endpoint will be used.
+// If an invalid region name is provided, the non-regional endpoint MIGHT be used or the token request MIGHT fail.
+func WithAzureRegion(val string) Option {
+ return func(o *clientOptions) {
+ o.azureRegion = val
+ }
+}
+
+// New is the constructor for Client. authority is the URL of a token authority such as "https://login.microsoftonline.com/<your tenant>".
+// If the Client will connect directly to AD FS, use "adfs" for the tenant. clientID is the application's client ID (also called its
+// "application ID").
+func New(authority, clientID string, cred Credential, options ...Option) (Client, error) {
+	internalCred, err := cred.toInternal()
+	if err != nil {
+		return Client{}, err
+	}
+
+	opts := clientOptions{
+		authority: authority,
+		// if the caller specified a token provider, it will handle all details of authentication, using Client only as a token cache
+		disableInstanceDiscovery: cred.tokenProvider != nil,
+		httpClient: shared.DefaultClient,
+	}
+	// Apply caller options after the defaults so they can override them.
+	for _, o := range options {
+		o(&opts)
+	}
+	baseOpts := []base.Option{
+		base.WithCacheAccessor(opts.accessor),
+		base.WithClientCapabilities(opts.capabilities),
+		base.WithInstanceDiscovery(!opts.disableInstanceDiscovery),
+		base.WithRegionDetection(opts.azureRegion),
+		base.WithX5C(opts.sendX5C),
+	}
+	base, err := base.New(clientID, opts.authority, oauth.New(opts.httpClient), baseOpts...)
+	if err != nil {
+		return Client{}, err
+	}
+	// Mark the shared auth params so downstream requests use confidential-client behavior.
+	base.AuthParams.IsConfidentialClient = true
+
+	return Client{base: base, cred: internalCred}, nil
+}
+
+// authCodeURLOptions contains options for AuthCodeURL
+type authCodeURLOptions struct {
+ claims, loginHint, tenantID, domainHint string
+}
+
+// AuthCodeURLOption is implemented by options for AuthCodeURL
+type AuthCodeURLOption interface {
+ authCodeURLOption()
+}
+
+// AuthCodeURL creates a URL used to acquire an authorization code. Users need to call CreateAuthorizationCodeURLParameters and pass it in.
+//
+// Options: [WithClaims], [WithDomainHint], [WithLoginHint], [WithTenantID]
+func (cca Client) AuthCodeURL(ctx context.Context, clientID, redirectURI string, scopes []string, opts ...AuthCodeURLOption) (string, error) {
+	o := authCodeURLOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return "", err
+	}
+	// WithTenant returns a copy of the client's auth params bound to the requested tenant.
+	ap, err := cca.base.AuthParams.WithTenant(o.tenantID)
+	if err != nil {
+		return "", err
+	}
+	ap.Claims = o.claims
+	ap.LoginHint = o.loginHint
+	ap.DomainHint = o.domainHint
+	return cca.base.AuthCodeURL(ctx, clientID, redirectURI, scopes, ap)
+}
+
+// WithLoginHint pre-populates the login prompt with a username.
+func WithLoginHint(username string) interface {
+ AuthCodeURLOption
+ options.CallOption
+} {
+ return struct {
+ AuthCodeURLOption
+ options.CallOption
+ }{
+ CallOption: options.NewCallOption(
+ func(a any) error {
+ switch t := a.(type) {
+ case *authCodeURLOptions:
+ t.loginHint = username
+ default:
+ return fmt.Errorf("unexpected options type %T", a)
+ }
+ return nil
+ },
+ ),
+ }
+}
+
+// WithDomainHint adds the IdP domain as domain_hint query parameter in the auth url.
+func WithDomainHint(domain string) interface {
+ AuthCodeURLOption
+ options.CallOption
+} {
+ return struct {
+ AuthCodeURLOption
+ options.CallOption
+ }{
+ CallOption: options.NewCallOption(
+ func(a any) error {
+ switch t := a.(type) {
+ case *authCodeURLOptions:
+ t.domainHint = domain
+ default:
+ return fmt.Errorf("unexpected options type %T", a)
+ }
+ return nil
+ },
+ ),
+ }
+}
+
+// WithClaims sets additional claims to request for the token, such as those required by conditional access policies.
+// Use this option when Azure AD returned a claims challenge for a prior request. The argument must be decoded.
+// This option is valid for any token acquisition method.
+func WithClaims(claims string) interface {
+ AcquireByAuthCodeOption
+ AcquireByCredentialOption
+ AcquireOnBehalfOfOption
+ AcquireSilentOption
+ AuthCodeURLOption
+ options.CallOption
+} {
+ return struct {
+ AcquireByAuthCodeOption
+ AcquireByCredentialOption
+ AcquireOnBehalfOfOption
+ AcquireSilentOption
+ AuthCodeURLOption
+ options.CallOption
+ }{
+ CallOption: options.NewCallOption(
+ func(a any) error {
+ switch t := a.(type) {
+ case *acquireTokenByAuthCodeOptions:
+ t.claims = claims
+ case *acquireTokenByCredentialOptions:
+ t.claims = claims
+ case *acquireTokenOnBehalfOfOptions:
+ t.claims = claims
+ case *acquireTokenSilentOptions:
+ t.claims = claims
+ case *authCodeURLOptions:
+ t.claims = claims
+ default:
+ return fmt.Errorf("unexpected options type %T", a)
+ }
+ return nil
+ },
+ ),
+ }
+}
+
+// WithAuthenticationScheme is an extensibility mechanism designed to be used only by Azure Arc for proof of possession access tokens.
+func WithAuthenticationScheme(authnScheme AuthenticationScheme) interface {
+ AcquireSilentOption
+ AcquireByCredentialOption
+ options.CallOption
+} {
+ return struct {
+ AcquireSilentOption
+ AcquireByCredentialOption
+ options.CallOption
+ }{
+ CallOption: options.NewCallOption(
+ func(a any) error {
+ switch t := a.(type) {
+ case *acquireTokenSilentOptions:
+ t.authnScheme = authnScheme
+ case *acquireTokenByCredentialOptions:
+ t.authnScheme = authnScheme
+ default:
+ return fmt.Errorf("unexpected options type %T", a)
+ }
+ return nil
+ },
+ ),
+ }
+}
+
+// WithTenantID specifies a tenant for a single authentication. It may be different than the tenant set in [New].
+// This option is valid for any token acquisition method.
+func WithTenantID(tenantID string) interface {
+ AcquireByAuthCodeOption
+ AcquireByCredentialOption
+ AcquireOnBehalfOfOption
+ AcquireSilentOption
+ AuthCodeURLOption
+ options.CallOption
+} {
+ return struct {
+ AcquireByAuthCodeOption
+ AcquireByCredentialOption
+ AcquireOnBehalfOfOption
+ AcquireSilentOption
+ AuthCodeURLOption
+ options.CallOption
+ }{
+ CallOption: options.NewCallOption(
+ func(a any) error {
+ switch t := a.(type) {
+ case *acquireTokenByAuthCodeOptions:
+ t.tenantID = tenantID
+ case *acquireTokenByCredentialOptions:
+ t.tenantID = tenantID
+ case *acquireTokenOnBehalfOfOptions:
+ t.tenantID = tenantID
+ case *acquireTokenSilentOptions:
+ t.tenantID = tenantID
+ case *authCodeURLOptions:
+ t.tenantID = tenantID
+ default:
+ return fmt.Errorf("unexpected options type %T", a)
+ }
+ return nil
+ },
+ ),
+ }
+}
+
+// acquireTokenSilentOptions are all the optional settings to an AcquireTokenSilent() call.
+// These are set by using various AcquireTokenSilentOption functions.
+type acquireTokenSilentOptions struct {
+ account Account
+ claims, tenantID string
+ authnScheme AuthenticationScheme
+}
+
+// AcquireSilentOption is implemented by options for AcquireTokenSilent
+type AcquireSilentOption interface {
+ acquireSilentOption()
+}
+
+// WithSilentAccount uses the passed account during an AcquireTokenSilent() call.
+func WithSilentAccount(account Account) interface {
+ AcquireSilentOption
+ options.CallOption
+} {
+ return struct {
+ AcquireSilentOption
+ options.CallOption
+ }{
+ CallOption: options.NewCallOption(
+ func(a any) error {
+ switch t := a.(type) {
+ case *acquireTokenSilentOptions:
+ t.account = account
+ default:
+ return fmt.Errorf("unexpected options type %T", a)
+ }
+ return nil
+ },
+ ),
+ }
+}
+
+// AcquireTokenSilent acquires a token from either the cache or using a refresh token.
+//
+// Options: [WithClaims], [WithSilentAccount], [WithTenantID]
+func (cca Client) AcquireTokenSilent(ctx context.Context, scopes []string, opts ...AcquireSilentOption) (AuthResult, error) {
+	o := acquireTokenSilentOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return AuthResult{}, err
+	}
+
+	// A claims challenge invalidates cached tokens, so it can never be satisfied silently.
+	if o.claims != "" {
+		return AuthResult{}, errors.New("call another AcquireToken method to request a new token having these claims")
+	}
+
+	silentParameters := base.AcquireTokenSilentParameters{
+		Scopes: scopes,
+		Account: o.account,
+		RequestType: accesstokens.ATConfidential,
+		Credential: cca.cred,
+		// With no user account this is an app-token (client credentials) cache lookup.
+		IsAppCache: o.account.IsZero(),
+		TenantID: o.tenantID,
+		AuthnScheme: o.authnScheme,
+	}
+
+	return cca.base.AcquireTokenSilent(ctx, silentParameters)
+}
+
+// acquireTokenByAuthCodeOptions contains the optional parameters used to acquire an access token using the authorization code flow.
+type acquireTokenByAuthCodeOptions struct {
+ challenge, claims, tenantID string
+}
+
+// AcquireByAuthCodeOption is implemented by options for AcquireTokenByAuthCode
+type AcquireByAuthCodeOption interface {
+ acquireByAuthCodeOption()
+}
+
+// WithChallenge allows you to provide a challenge for the .AcquireTokenByAuthCode() call.
+func WithChallenge(challenge string) interface {
+ AcquireByAuthCodeOption
+ options.CallOption
+} {
+ return struct {
+ AcquireByAuthCodeOption
+ options.CallOption
+ }{
+ CallOption: options.NewCallOption(
+ func(a any) error {
+ switch t := a.(type) {
+ case *acquireTokenByAuthCodeOptions:
+ t.challenge = challenge
+ default:
+ return fmt.Errorf("unexpected options type %T", a)
+ }
+ return nil
+ },
+ ),
+ }
+}
+
+// AcquireTokenByAuthCode is a request to acquire a security token from the authority, using an authorization code.
+// The specified redirect URI must be the same URI that was used when the authorization code was requested.
+//
+// Options: [WithChallenge], [WithClaims], [WithTenantID]
+func (cca Client) AcquireTokenByAuthCode(ctx context.Context, code string, redirectURI string, scopes []string, opts ...AcquireByAuthCodeOption) (AuthResult, error) {
+	o := acquireTokenByAuthCodeOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return AuthResult{}, err
+	}
+
+	params := base.AcquireTokenAuthCodeParameters{
+		Scopes: scopes,
+		Code: code,
+		// Challenge is the value supplied via WithChallenge, if any.
+		Challenge: o.challenge,
+		Claims: o.claims,
+		AppType: accesstokens.ATConfidential,
+		Credential: cca.cred, // This setting differs from public.Client.AcquireTokenByAuthCode
+		RedirectURI: redirectURI,
+		TenantID: o.tenantID,
+	}
+
+	return cca.base.AcquireTokenByAuthCode(ctx, params)
+}
+
+// acquireTokenByCredentialOptions contains optional configuration for AcquireTokenByCredential
+type acquireTokenByCredentialOptions struct {
+ claims, tenantID string
+ authnScheme AuthenticationScheme
+}
+
+// AcquireByCredentialOption is implemented by options for AcquireTokenByCredential
+type AcquireByCredentialOption interface {
+ acquireByCredOption()
+}
+
+// AcquireTokenByCredential acquires a security token from the authority, using the client credentials grant.
+//
+// Options: [WithClaims], [WithTenantID]
+func (cca Client) AcquireTokenByCredential(ctx context.Context, scopes []string, opts ...AcquireByCredentialOption) (AuthResult, error) {
+	o := acquireTokenByCredentialOptions{}
+	err := options.ApplyOptions(&o, opts)
+	if err != nil {
+		return AuthResult{}, err
+	}
+	authParams, err := cca.base.AuthParams.WithTenant(o.tenantID)
+	if err != nil {
+		return AuthResult{}, err
+	}
+	authParams.Scopes = scopes
+	authParams.AuthorizationType = authority.ATClientCredentials
+	authParams.Claims = o.claims
+	// Only override the default authentication scheme when the caller supplied one.
+	if o.authnScheme != nil {
+		authParams.AuthnScheme = o.authnScheme
+	}
+	token, err := cca.base.Token.Credential(ctx, authParams, cca.cred)
+	if err != nil {
+		return AuthResult{}, err
+	}
+	return cca.base.AuthResultFromToken(ctx, authParams, token, true)
+}
+
+// acquireTokenOnBehalfOfOptions contains optional configuration for AcquireTokenOnBehalfOf
+type acquireTokenOnBehalfOfOptions struct {
+ claims, tenantID string
+}
+
+// AcquireOnBehalfOfOption is implemented by options for AcquireTokenOnBehalfOf
+type AcquireOnBehalfOfOption interface {
+ acquireOBOOption()
+}
+
+// AcquireTokenOnBehalfOf acquires a security token for an app using middle tier apps access token.
+// Refer https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-oauth2-on-behalf-of-flow.
+//
+// Options: [WithClaims], [WithTenantID]
+func (cca Client) AcquireTokenOnBehalfOf(ctx context.Context, userAssertion string, scopes []string, opts ...AcquireOnBehalfOfOption) (AuthResult, error) {
+	o := acquireTokenOnBehalfOfOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return AuthResult{}, err
+	}
+	// userAssertion is the incoming access token this app exchanges for a downstream token.
+	params := base.AcquireTokenOnBehalfOfParameters{
+		Scopes: scopes,
+		UserAssertion: userAssertion,
+		Claims: o.claims,
+		Credential: cca.cred,
+		TenantID: o.tenantID,
+	}
+	return cca.base.AcquireTokenOnBehalfOf(ctx, params)
+}
+
+// Account gets the account in the token cache with the specified homeAccountID.
+func (cca Client) Account(ctx context.Context, accountID string) (Account, error) {
+ return cca.base.Account(ctx, accountID)
+}
+
+// RemoveAccount signs the account out and forgets account from token cache.
+func (cca Client) RemoveAccount(ctx context.Context, account Account) error {
+ return cca.base.RemoveAccount(ctx, account)
+}
@@ -0,0 +1,111 @@
+# MSAL Error Design
+
+Author: Abhidnya Patil(abhidnya.patil@microsoft.com)
+
+Contributors:
+
+- John Doak(jdoak@microsoft.com)
+- Keegan Caruso(Keegan.Caruso@microsoft.com)
+- Joel Hendrix(jhendrix@microsoft.com)
+
+## Background
+
+Errors in MSAL are intended for app developers to troubleshoot and not for displaying to end-users.
+
+### Go error handling vs other MSAL languages
+
+Most modern languages use exception based errors. Simply put, you "throw" an exception and it must be caught at some routine in the upper stack or it will eventually crash the program.
+
+Go doesn't use exceptions, instead it relies on multiple return values, one of which can be the builtin error interface type. It is up to the user to decide what to do.
+
+### Go custom error types
+
+Errors can be created in Go by simply using errors.New() or fmt.Errorf() to create an "error".
+
+Custom errors can be created in multiple ways. One of the more robust ways is simply to satisfy the error interface:
+
+```go
+type MyCustomErr struct {
+ Msg string
+}
+func (m MyCustomErr) Error() string { // This implements "error"
+ return m.Msg
+}
+```
+
+### MSAL Error Goals
+
+- Provide diagnostics to the user and for tickets that can be used to track down bugs or client misconfigurations
+- Detect errors that are transitory and can be retried
+- Allow the user to identify certain errors that the program can respond to, such as informing the user of the need to do an enrollment
+
+## Implementing Client Side Errors
+
+Client side errors indicate a misconfiguration or passing of bad arguments that is non-recoverable. Retrying isn't possible.
+
+These errors can simply be standard Go errors created by errors.New() or fmt.Errorf(). If down the line we need a custom error, we can introduce it, but for now the error messages just need to be clear on what the issue was.
+
+## Implementing Service Side Errors
+
+Service side errors occur when an external RPC responds either with an HTTP error code or returns a message that includes an error.
+
+These errors can be transitory (please slow down) or permanent (HTTP 404). To provide our diagnostic goals, we require the ability to differentiate these errors from other errors.
+
+The current implementation includes a specialized type that captures any error from the server:
+
+```go
+// CallErr represents an HTTP call error. Has a Verbose() method that allows getting the
+// http.Request and Response objects. Implements error.
+type CallErr struct {
+ Req *http.Request
+ Resp *http.Response
+ Err error
+}
+
+// Error implements the error interface.
+func (e CallErr) Error() string {
+ return e.Err.Error()
+}
+
+// Verbose prints a verbose error message with the request or response.
+func (e CallErr) Verbose() string {
+ e.Resp.Request = nil // This brings in a bunch of TLS stuff we don't need
+ e.Resp.TLS = nil // Same
+ return fmt.Sprintf("%s:\nRequest:\n%s\nResponse:\n%s", e.Err, prettyConf.Sprint(e.Req), prettyConf.Sprint(e.Resp))
+}
+```
+
+A user will always receive the most concise error we provide. They can tell if it is a server side error using Go error package:
+
+```go
+var callErr CallErr
+if errors.As(err, &callErr) {
+ ...
+}
+```
+
+We provide a Verbose() function that can retrieve the most verbose message from any error we provide:
+
+```go
+fmt.Println(errors.Verbose(err))
+```
+
+If further differentiation is required, we can add custom errors that use Go error wrapping on top of CallErr to achieve our diagnostic goals (such as detecting when to retry a call due to transient errors).
+
+CallErr is always thrown from the comm package (which handles all http requests) and looks similar to:
+
+```go
+return nil, errors.CallErr{
+ Req: req,
+ Resp: reply,
+ Err: fmt.Errorf("http call(%s)(%s) error: reply status code was %d:\n%s", req.URL.String(), req.Method, reply.StatusCode, ErrorResponse), //ErrorResponse is the json body extracted from the http response
+ }
+```
+
+## Future Decisions
+
+The ability to retry calls needs to have centralized responsibility. Either the user is doing it or the client is doing it.
+
+If the user should be responsible, our errors package will include a CanRetry() function that will inform the user if the error provided to them is retryable. This is based on the http error code and possibly the type of error that was returned. It would also include a sleep time if the server returned an amount of time to wait.
+
+Otherwise we will do this internally and retries will be left to us.
@@ -0,0 +1,89 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package errors
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "reflect"
+ "strings"
+
+ "github.com/kylelemons/godebug/pretty"
+)
+
+// prettyConf controls how requests and responses are rendered by CallErr.Verbose().
+// The io.Reader formatter drains the reader so HTTP bodies print as plain text.
+var prettyConf = &pretty.Config{
+	IncludeUnexported: false,
+	SkipZeroFields: true,
+	TrackCycles: true,
+	Formatter: map[reflect.Type]interface{}{
+		reflect.TypeOf((*io.Reader)(nil)).Elem(): func(r io.Reader) string {
+			b, err := io.ReadAll(r)
+			if err != nil {
+				return "could not read io.Reader content"
+			}
+			return string(b)
+		},
+	},
+}
+
+type verboser interface {
+ Verbose() string
+}
+
+// Verbose prints the most verbose error that the error message has.
+// It walks the wrapped-error chain via errors.Unwrap, concatenating each error's
+// Verbose() output when available and its Error() string otherwise.
+func Verbose(err error) string {
+	build := strings.Builder{}
+	for {
+		if err == nil {
+			break
+		}
+		if v, ok := err.(verboser); ok {
+			build.WriteString(v.Verbose())
+		} else {
+			build.WriteString(err.Error())
+		}
+		err = errors.Unwrap(err)
+	}
+	return build.String()
+}
+
+// New is equivalent to errors.New().
+func New(text string) error {
+ return errors.New(text)
+}
+
+// CallErr represents an HTTP call error. Has a Verbose() method that allows getting the
+// http.Request and Response objects. Implements error.
+type CallErr struct {
+ Req *http.Request
+ // Resp contains response body
+ Resp *http.Response
+ Err error
+}
+
+// Error implements the error interface, returning the wrapped error's message.
+func (e CallErr) Error() string {
+	return e.Err.Error()
+}
+
+// Verbose prints a verbose error message with the request or response.
+// NOTE(review): this dereferences e.Resp without a nil check — confirm callers
+// never construct a CallErr with a nil Resp before relying on Verbose.
+func (e CallErr) Verbose() string {
+	e.Resp.Request = nil // This brings in a bunch of TLS crap we don't need
+	e.Resp.TLS = nil // Same
+	return fmt.Sprintf("%s:\nRequest:\n%s\nResponse:\n%s", e.Err, prettyConf.Sprint(e.Req), prettyConf.Sprint(e.Resp))
+}
+
+// Is reports whether any error in errors chain matches target.
+func Is(err, target error) bool {
+ return errors.Is(err, target)
+}
+
+// As finds the first error in errors chain that matches target,
+// and if so, sets target to that error value and returns true.
+// Otherwise, it returns false.
+func As(err error, target interface{}) bool {
+ return errors.As(err, target)
+}
@@ -0,0 +1,477 @@
+// Package base contains a "Base" client that is used by the external public.Client and confidential.Client.
+// Base holds shared attributes that must be available to both clients and methods that act as
+// shared calls.
+package base
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/url"
+ "reflect"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/cache"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base/internal/storage"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/shared"
+)
+
const (
	// AuthorityPublicCloud is the default AAD authority host
	AuthorityPublicCloud = "https://login.microsoftonline.com/common"
	// scopeSeparator joins OAuth scopes into the space-delimited form used in
	// authorization requests and cached scope strings.
	scopeSeparator = " "
)
+
// manager provides an internal cache. It is defined to allow faking the cache in tests.
// In production it's a *storage.Manager or *storage.PartitionedManager.
type manager interface {
	cache.Serializer
	// Read returns the cached token response matching the auth parameters, if any.
	Read(context.Context, authority.AuthParams) (storage.TokenResponse, error)
	// Write stores a token response and returns the account it was stored under.
	Write(authority.AuthParams, accesstokens.TokenResponse) (shared.Account, error)
}

// accountManager is a manager that also caches accounts. In production it's a *storage.Manager.
type accountManager interface {
	manager
	AllAccounts() []shared.Account
	Account(homeAccountID string) shared.Account
	RemoveAccount(account shared.Account, clientID string)
}
+
// AcquireTokenSilentParameters contains the parameters to acquire a token silently (from cache).
type AcquireTokenSilentParameters struct {
	Scopes      []string
	Account     shared.Account
	RequestType accesstokens.AppType
	Credential  *accesstokens.Credential
	// IsAppCache is passed to CacheKey to select the app-level (client
	// credential) cache partition rather than the user partition.
	IsAppCache        bool
	TenantID          string
	UserAssertion     string
	AuthorizationType authority.AuthorizeType
	Claims            string
	// AuthnScheme, when non-nil, overrides the default authentication scheme
	// for the request (see AcquireTokenSilent).
	AuthnScheme authority.AuthenticationScheme
}
+
// AcquireTokenAuthCodeParameters contains the parameters required to acquire an access token using the auth code flow.
// To use PKCE, set the CodeChallengeParameter.
// Code challenges are used to secure authorization code grants; for more information, visit
// https://tools.ietf.org/html/rfc7636.
type AcquireTokenAuthCodeParameters struct {
	Scopes []string
	// Code is the authorization code returned by the authorization endpoint.
	Code string
	// Challenge is the PKCE value forwarded to NewCodeChallengeRequest
	// (presumably the code verifier — confirm against accesstokens).
	Challenge   string
	Claims      string
	RedirectURI string
	AppType     accesstokens.AppType
	Credential  *accesstokens.Credential
	TenantID    string
}
+
// AcquireTokenOnBehalfOfParameters contains the parameters required to acquire
// a token on-behalf-of a user (OBO flow). UserAssertion is the access token
// presented by the middle-tier caller.
type AcquireTokenOnBehalfOfParameters struct {
	Scopes        []string
	Claims        string
	Credential    *accesstokens.Credential
	TenantID      string
	UserAssertion string
}
+
// AuthResult contains the results of one token acquisition operation in PublicClientApplication
// or ConfidentialClientApplication. For details see https://aka.ms/msal-net-authenticationresult
type AuthResult struct {
	Account     shared.Account
	IDToken     accesstokens.IDToken
	AccessToken string
	ExpiresOn   time.Time
	GrantedScopes []string
	// DeclinedScopes is never populated by the constructors in this package:
	// NewAuthResult returns an error instead when scopes were declined.
	DeclinedScopes []string
}
+
+// AuthResultFromStorage creates an AuthResult from a storage token response (which is generated from the cache).
+func AuthResultFromStorage(storageTokenResponse storage.TokenResponse) (AuthResult, error) {
+ if err := storageTokenResponse.AccessToken.Validate(); err != nil {
+ return AuthResult{}, fmt.Errorf("problem with access token in StorageTokenResponse: %w", err)
+ }
+
+ account := storageTokenResponse.Account
+ accessToken := storageTokenResponse.AccessToken.Secret
+ grantedScopes := strings.Split(storageTokenResponse.AccessToken.Scopes, scopeSeparator)
+
+ // Checking if there was an ID token in the cache; this will throw an error in the case of confidential client applications.
+ var idToken accesstokens.IDToken
+ if !storageTokenResponse.IDToken.IsZero() {
+ err := idToken.UnmarshalJSON([]byte(storageTokenResponse.IDToken.Secret))
+ if err != nil {
+ return AuthResult{}, fmt.Errorf("problem decoding JWT token: %w", err)
+ }
+ }
+ return AuthResult{account, idToken, accessToken, storageTokenResponse.AccessToken.ExpiresOn.T, grantedScopes, nil}, nil
+}
+
+// NewAuthResult creates an AuthResult.
+func NewAuthResult(tokenResponse accesstokens.TokenResponse, account shared.Account) (AuthResult, error) {
+ if len(tokenResponse.DeclinedScopes) > 0 {
+ return AuthResult{}, fmt.Errorf("token response failed because declined scopes are present: %s", strings.Join(tokenResponse.DeclinedScopes, ","))
+ }
+ return AuthResult{
+ Account: account,
+ IDToken: tokenResponse.IDToken,
+ AccessToken: tokenResponse.AccessToken,
+ ExpiresOn: tokenResponse.ExpiresOn.T,
+ GrantedScopes: tokenResponse.GrantedScopes.Slice,
+ }, nil
+}
+
// Client is a base client that provides access to common methods and primitives that
// can be used by multiple clients.
type Client struct {
	Token   *oauth.Client
	manager accountManager // *storage.Manager or fakeManager in tests
	// pmanager is a partitioned cache for OBO authentication. *storage.PartitionedManager or fakeManager in tests
	pmanager manager

	AuthParams    authority.AuthParams // DO NOT EVER MAKE THIS A POINTER! See "Note" in New().
	cacheAccessor cache.ExportReplace
	// cacheAccessorMu serializes use of cacheAccessor (RLock around Replace in
	// read paths, Lock around write+Export). A pointer so Client can be copied
	// by value while all copies share the same lock.
	cacheAccessorMu *sync.RWMutex
}
+
+// Option is an optional argument to the New constructor.
+type Option func(c *Client) error
+
+// WithCacheAccessor allows you to set some type of cache for storing authentication tokens.
+func WithCacheAccessor(ca cache.ExportReplace) Option {
+ return func(c *Client) error {
+ if ca != nil {
+ c.cacheAccessor = ca
+ }
+ return nil
+ }
+}
+
+// WithClientCapabilities allows configuring one or more client capabilities such as "CP1"
+func WithClientCapabilities(capabilities []string) Option {
+ return func(c *Client) error {
+ var err error
+ if len(capabilities) > 0 {
+ cc, err := authority.NewClientCapabilities(capabilities)
+ if err == nil {
+ c.AuthParams.Capabilities = cc
+ }
+ }
+ return err
+ }
+}
+
+// WithKnownAuthorityHosts specifies hosts Client shouldn't validate or request metadata for because they're known to the user
+func WithKnownAuthorityHosts(hosts []string) Option {
+ return func(c *Client) error {
+ cp := make([]string, len(hosts))
+ copy(cp, hosts)
+ c.AuthParams.KnownAuthorityHosts = cp
+ return nil
+ }
+}
+
// WithX5C specifies if x5c claim(public key of the certificate) should be sent to STS to enable Subject Name Issuer Authentication.
func WithX5C(sendX5C bool) Option {
	return func(c *Client) error {
		c.AuthParams.SendX5C = sendX5C
		return nil
	}
}

// WithRegionDetection sets the Azure region on the authority info. The exact
// semantics of the region value are defined by the authority package.
func WithRegionDetection(region string) Option {
	return func(c *Client) error {
		c.AuthParams.AuthorityInfo.Region = region
		return nil
	}
}

// WithInstanceDiscovery enables or disables AAD instance discovery. The two
// authority flags are kept in sync: disabling discovery also disables
// authority validation.
func WithInstanceDiscovery(instanceDiscoveryEnabled bool) Option {
	return func(c *Client) error {
		c.AuthParams.AuthorityInfo.ValidateAuthority = instanceDiscoveryEnabled
		c.AuthParams.AuthorityInfo.InstanceDiscoveryDisabled = !instanceDiscoveryEnabled
		return nil
	}
}
+
+// New is the constructor for Base.
+func New(clientID string, authorityURI string, token *oauth.Client, options ...Option) (Client, error) {
+ //By default, validateAuthority is set to true and instanceDiscoveryDisabled is set to false
+ authInfo, err := authority.NewInfoFromAuthorityURI(authorityURI, true, false)
+ if err != nil {
+ return Client{}, err
+ }
+ authParams := authority.NewAuthParams(clientID, authInfo)
+ client := Client{ // Note: Hey, don't even THINK about making Base into *Base. See "design notes" in public.go and confidential.go
+ Token: token,
+ AuthParams: authParams,
+ cacheAccessorMu: &sync.RWMutex{},
+ manager: storage.New(token),
+ pmanager: storage.NewPartitionedManager(token),
+ }
+ for _, o := range options {
+ if err = o(&client); err != nil {
+ break
+ }
+ }
+ return client, err
+
+}
+
+// AuthCodeURL creates a URL used to acquire an authorization code.
+func (b Client) AuthCodeURL(ctx context.Context, clientID, redirectURI string, scopes []string, authParams authority.AuthParams) (string, error) {
+ endpoints, err := b.Token.ResolveEndpoints(ctx, authParams.AuthorityInfo, "")
+ if err != nil {
+ return "", err
+ }
+
+ baseURL, err := url.Parse(endpoints.AuthorizationEndpoint)
+ if err != nil {
+ return "", err
+ }
+
+ claims, err := authParams.MergeCapabilitiesAndClaims()
+ if err != nil {
+ return "", err
+ }
+
+ v := url.Values{}
+ v.Add("client_id", clientID)
+ v.Add("response_type", "code")
+ v.Add("redirect_uri", redirectURI)
+ v.Add("scope", strings.Join(scopes, scopeSeparator))
+ if authParams.State != "" {
+ v.Add("state", authParams.State)
+ }
+ if claims != "" {
+ v.Add("claims", claims)
+ }
+ if authParams.CodeChallenge != "" {
+ v.Add("code_challenge", authParams.CodeChallenge)
+ }
+ if authParams.CodeChallengeMethod != "" {
+ v.Add("code_challenge_method", authParams.CodeChallengeMethod)
+ }
+ if authParams.LoginHint != "" {
+ v.Add("login_hint", authParams.LoginHint)
+ }
+ if authParams.Prompt != "" {
+ v.Add("prompt", authParams.Prompt)
+ }
+ if authParams.DomainHint != "" {
+ v.Add("domain_hint", authParams.DomainHint)
+ }
+ // There were left over from an implementation that didn't use any of these. We may
+ // need to add them later, but as of now aren't needed.
+ /*
+ if p.ResponseMode != "" {
+ urlParams.Add("response_mode", p.ResponseMode)
+ }
+ */
+ baseURL.RawQuery = v.Encode()
+ return baseURL.String(), nil
+}
+
// AcquireTokenSilent attempts to satisfy a token request from the cache,
// falling back to redeeming a cached refresh token. It returns an error when
// neither a usable access token nor a refresh token is available.
func (b Client) AcquireTokenSilent(ctx context.Context, silent AcquireTokenSilentParameters) (AuthResult, error) {
	ar := AuthResult{}
	// when tenant == "", the caller didn't specify a tenant and WithTenant will choose the client's configured tenant
	tenant := silent.TenantID
	authParams, err := b.AuthParams.WithTenant(tenant)
	if err != nil {
		return ar, err
	}
	authParams.Scopes = silent.Scopes
	authParams.HomeAccountID = silent.Account.HomeAccountID
	authParams.AuthorizationType = silent.AuthorizationType
	authParams.Claims = silent.Claims
	authParams.UserAssertion = silent.UserAssertion
	if silent.AuthnScheme != nil {
		authParams.AuthnScheme = silent.AuthnScheme
	}

	// OBO requests read the partitioned (per-assertion) cache; every other
	// request type is coerced to a refresh-token read of the regular cache.
	m := b.pmanager
	if authParams.AuthorizationType != authority.ATOnBehalfOf {
		authParams.AuthorizationType = authority.ATRefreshToken
		m = b.manager
	}
	// Give an external cache accessor a chance to load the latest state before
	// reading. The error is checked after the unlock, outside the if-block.
	if b.cacheAccessor != nil {
		key := authParams.CacheKey(silent.IsAppCache)
		b.cacheAccessorMu.RLock()
		err = b.cacheAccessor.Replace(ctx, m, cache.ReplaceHints{PartitionKey: key})
		b.cacheAccessorMu.RUnlock()
	}
	if err != nil {
		return ar, err
	}
	storageTokenResponse, err := m.Read(ctx, authParams)
	if err != nil {
		return ar, err
	}

	// ignore cached access tokens when given claims
	if silent.Claims == "" {
		ar, err = AuthResultFromStorage(storageTokenResponse)
		if err == nil {
			ar.AccessToken, err = authParams.AuthnScheme.FormatAccessToken(ar.AccessToken)
			return ar, err
		}
	}

	// redeem a cached refresh token, if available
	if reflect.ValueOf(storageTokenResponse.RefreshToken).IsZero() {
		return ar, errors.New("no token found")
	}
	var cc *accesstokens.Credential
	// Only confidential clients present a credential when redeeming.
	if silent.RequestType == accesstokens.ATConfidential {
		cc = silent.Credential
	}
	token, err := b.Token.Refresh(ctx, silent.RequestType, authParams, cc, storageTokenResponse.RefreshToken)
	if err != nil {
		return ar, err
	}
	return b.AuthResultFromToken(ctx, authParams, token, true)
}
+
// AcquireTokenByAuthCode redeems an authorization code for tokens, caching the
// result and returning it as an AuthResult.
func (b Client) AcquireTokenByAuthCode(ctx context.Context, authCodeParams AcquireTokenAuthCodeParameters) (AuthResult, error) {
	authParams, err := b.AuthParams.WithTenant(authCodeParams.TenantID)
	if err != nil {
		return AuthResult{}, err
	}
	authParams.Claims = authCodeParams.Claims
	authParams.Scopes = authCodeParams.Scopes
	authParams.Redirecturi = authCodeParams.RedirectURI
	authParams.AuthorizationType = authority.ATAuthCode

	// Confidential clients additionally authenticate themselves with a credential.
	var cc *accesstokens.Credential
	if authCodeParams.AppType == accesstokens.ATConfidential {
		cc = authCodeParams.Credential
		authParams.IsConfidentialClient = true
	}

	req, err := accesstokens.NewCodeChallengeRequest(authParams, authCodeParams.AppType, cc, authCodeParams.Code, authCodeParams.Challenge)
	if err != nil {
		return AuthResult{}, err
	}

	token, err := b.Token.AuthCode(ctx, req)
	if err != nil {
		return AuthResult{}, err
	}

	// Write the fresh tokens to the cache and convert to an AuthResult.
	return b.AuthResultFromToken(ctx, authParams, token, true)
}
+
// AcquireTokenOnBehalfOf acquires a security token for an app using middle tier apps access token.
// It first tries the cache via AcquireTokenSilent and only calls the STS on a miss.
func (b Client) AcquireTokenOnBehalfOf(ctx context.Context, onBehalfOfParams AcquireTokenOnBehalfOfParameters) (AuthResult, error) {
	var ar AuthResult
	silentParameters := AcquireTokenSilentParameters{
		Scopes:            onBehalfOfParams.Scopes,
		RequestType:       accesstokens.ATConfidential,
		Credential:        onBehalfOfParams.Credential,
		UserAssertion:     onBehalfOfParams.UserAssertion,
		AuthorizationType: authority.ATOnBehalfOf,
		TenantID:          onBehalfOfParams.TenantID,
		Claims:            onBehalfOfParams.Claims,
	}
	ar, err := b.AcquireTokenSilent(ctx, silentParameters)
	if err == nil {
		// Cache hit; nothing more to do.
		return ar, err
	}
	// Cache miss: request a new token from the STS.
	authParams, err := b.AuthParams.WithTenant(onBehalfOfParams.TenantID)
	if err != nil {
		return AuthResult{}, err
	}
	authParams.AuthorizationType = authority.ATOnBehalfOf
	authParams.Claims = onBehalfOfParams.Claims
	authParams.Scopes = onBehalfOfParams.Scopes
	authParams.UserAssertion = onBehalfOfParams.UserAssertion
	token, err := b.Token.OnBehalfOf(ctx, authParams, onBehalfOfParams.Credential)
	if err == nil {
		ar, err = b.AuthResultFromToken(ctx, authParams, token, true)
	}
	return ar, err
}
+
+func (b Client) AuthResultFromToken(ctx context.Context, authParams authority.AuthParams, token accesstokens.TokenResponse, cacheWrite bool) (AuthResult, error) {
+ if !cacheWrite {
+ return NewAuthResult(token, shared.Account{})
+ }
+ var m manager = b.manager
+ if authParams.AuthorizationType == authority.ATOnBehalfOf {
+ m = b.pmanager
+ }
+ key := token.CacheKey(authParams)
+ if b.cacheAccessor != nil {
+ b.cacheAccessorMu.Lock()
+ defer b.cacheAccessorMu.Unlock()
+ err := b.cacheAccessor.Replace(ctx, m, cache.ReplaceHints{PartitionKey: key})
+ if err != nil {
+ return AuthResult{}, err
+ }
+ }
+ account, err := m.Write(authParams, token)
+ if err != nil {
+ return AuthResult{}, err
+ }
+ ar, err := NewAuthResult(token, account)
+ if err == nil && b.cacheAccessor != nil {
+ err = b.cacheAccessor.Export(ctx, b.manager, cache.ExportHints{PartitionKey: key})
+ }
+ if err != nil {
+ return AuthResult{}, err
+ }
+
+ ar.AccessToken, err = authParams.AuthnScheme.FormatAccessToken(ar.AccessToken)
+ return ar, err
+}
+
+func (b Client) AllAccounts(ctx context.Context) ([]shared.Account, error) {
+ if b.cacheAccessor != nil {
+ b.cacheAccessorMu.RLock()
+ defer b.cacheAccessorMu.RUnlock()
+ key := b.AuthParams.CacheKey(false)
+ err := b.cacheAccessor.Replace(ctx, b.manager, cache.ReplaceHints{PartitionKey: key})
+ if err != nil {
+ return nil, err
+ }
+ }
+ return b.manager.AllAccounts(), nil
+}
+
+func (b Client) Account(ctx context.Context, homeAccountID string) (shared.Account, error) {
+ if b.cacheAccessor != nil {
+ b.cacheAccessorMu.RLock()
+ defer b.cacheAccessorMu.RUnlock()
+ authParams := b.AuthParams // This is a copy, as we don't have a pointer receiver and .AuthParams is not a pointer.
+ authParams.AuthorizationType = authority.AccountByID
+ authParams.HomeAccountID = homeAccountID
+ key := b.AuthParams.CacheKey(false)
+ err := b.cacheAccessor.Replace(ctx, b.manager, cache.ReplaceHints{PartitionKey: key})
+ if err != nil {
+ return shared.Account{}, err
+ }
+ }
+ return b.manager.Account(homeAccountID), nil
+}
+
+// RemoveAccount removes all the ATs, RTs and IDTs from the cache associated with this account.
+func (b Client) RemoveAccount(ctx context.Context, account shared.Account) error {
+ if b.cacheAccessor == nil {
+ b.manager.RemoveAccount(account, b.AuthParams.ClientID)
+ return nil
+ }
+ b.cacheAccessorMu.Lock()
+ defer b.cacheAccessorMu.Unlock()
+ key := b.AuthParams.CacheKey(false)
+ err := b.cacheAccessor.Replace(ctx, b.manager, cache.ReplaceHints{PartitionKey: key})
+ if err != nil {
+ return err
+ }
+ b.manager.RemoveAccount(account, b.AuthParams.ClientID)
+ return b.cacheAccessor.Export(ctx, b.manager, cache.ExportHints{PartitionKey: key})
+}
@@ -0,0 +1,213 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package storage
+
+import (
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+ "time"
+
+ internalTime "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/types/time"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/shared"
+)
+
// Contract is the JSON structure that is written to any storage medium when serializing
// the internal cache. This design is shared between MSAL versions in many languages.
// This cannot be changed without design that includes other SDKs.
type Contract struct {
	AccessTokens  map[string]AccessToken               `json:"AccessToken,omitempty"`
	RefreshTokens map[string]accesstokens.RefreshToken `json:"RefreshToken,omitempty"`
	IDTokens      map[string]IDToken                   `json:"IdToken,omitempty"`
	Accounts      map[string]shared.Account            `json:"Account,omitempty"`
	AppMetaData   map[string]AppMetaData               `json:"AppMetadata,omitempty"`

	// AdditionalFields presumably carries JSON keys not modeled above
	// (handled by the internal json package) — confirm before relying on it.
	AdditionalFields map[string]interface{}
}
+
// InMemoryContract is the in-memory representation used by the partitioned
// cache. Unlike Contract, each artifact map is additionally partitioned by a
// caller-derived key (see the PartitionedManager read/write paths).
type InMemoryContract struct {
	AccessTokensPartition  map[string]map[string]AccessToken
	RefreshTokensPartition map[string]map[string]accesstokens.RefreshToken
	IDTokensPartition      map[string]map[string]IDToken
	AccountsPartition      map[string]map[string]shared.Account
	AppMetaData            map[string]AppMetaData
}

// NewInMemoryContract is the constructor for InMemoryContract, with every
// partition map initialized and empty.
func NewInMemoryContract() *InMemoryContract {
	return &InMemoryContract{
		AccessTokensPartition:  map[string]map[string]AccessToken{},
		RefreshTokensPartition: map[string]map[string]accesstokens.RefreshToken{},
		IDTokensPartition:      map[string]map[string]IDToken{},
		AccountsPartition:      map[string]map[string]shared.Account{},
		AppMetaData:            map[string]AppMetaData{},
	}
}
+
// NewContract is the constructor for Contract, with every map initialized and
// empty.
func NewContract() *Contract {
	return &Contract{
		AccessTokens:     map[string]AccessToken{},
		RefreshTokens:    map[string]accesstokens.RefreshToken{},
		IDTokens:         map[string]IDToken{},
		Accounts:         map[string]shared.Account{},
		AppMetaData:      map[string]AppMetaData{},
		AdditionalFields: map[string]interface{}{},
	}
}
+
// AccessToken is the JSON representation of a MSAL access token for encoding to storage.
type AccessToken struct {
	HomeAccountID  string `json:"home_account_id,omitempty"`
	Environment    string `json:"environment,omitempty"`
	Realm          string `json:"realm,omitempty"`
	CredentialType string `json:"credential_type,omitempty"`
	ClientID       string `json:"client_id,omitempty"`
	Secret         string `json:"secret,omitempty"`
	// Scopes is the space-separated scope list (serialized under "target").
	Scopes            string            `json:"target,omitempty"`
	ExpiresOn         internalTime.Unix `json:"expires_on,omitempty"`
	ExtendedExpiresOn internalTime.Unix `json:"extended_expires_on,omitempty"`
	CachedAt          internalTime.Unix `json:"cached_at,omitempty"`
	UserAssertionHash string            `json:"user_assertion_hash,omitempty"`
	TokenType         string            `json:"token_type,omitempty"`
	AuthnSchemeKeyID  string            `json:"keyid,omitempty"`

	AdditionalFields map[string]interface{}
}
+
+// NewAccessToken is the constructor for AccessToken.
+func NewAccessToken(homeID, env, realm, clientID string, cachedAt, expiresOn, extendedExpiresOn time.Time, scopes, token, tokenType, authnSchemeKeyID string) AccessToken {
+ return AccessToken{
+ HomeAccountID: homeID,
+ Environment: env,
+ Realm: realm,
+ CredentialType: "AccessToken",
+ ClientID: clientID,
+ Secret: token,
+ Scopes: scopes,
+ CachedAt: internalTime.Unix{T: cachedAt.UTC()},
+ ExpiresOn: internalTime.Unix{T: expiresOn.UTC()},
+ ExtendedExpiresOn: internalTime.Unix{T: extendedExpiresOn.UTC()},
+ TokenType: tokenType,
+ AuthnSchemeKeyID: authnSchemeKeyID,
+ }
+}
+
+// Key outputs the key that can be used to uniquely look up this entry in a map.
+func (a AccessToken) Key() string {
+ key := strings.Join(
+ []string{a.HomeAccountID, a.Environment, a.CredentialType, a.ClientID, a.Realm, a.Scopes},
+ shared.CacheKeySeparator,
+ )
+ // add token type to key for new access tokens types. skip for bearer token type to
+ // preserve fwd and back compat between a common cache and msal clients
+ if !strings.EqualFold(a.TokenType, authority.AccessTokenTypeBearer) {
+ key = strings.Join([]string{key, a.TokenType}, shared.CacheKeySeparator)
+ }
+ return strings.ToLower(key)
+}
+
+// FakeValidate enables tests to fake access token validation
+var FakeValidate func(AccessToken) error
+
+// Validate validates that this AccessToken can be used.
+func (a AccessToken) Validate() error {
+ if FakeValidate != nil {
+ return FakeValidate(a)
+ }
+ if a.CachedAt.T.After(time.Now()) {
+ return errors.New("access token isn't valid, it was cached at a future time")
+ }
+ if a.ExpiresOn.T.Before(time.Now().Add(5 * time.Minute)) {
+ return fmt.Errorf("access token is expired")
+ }
+ if a.CachedAt.T.IsZero() {
+ return fmt.Errorf("access token does not have CachedAt set")
+ }
+ return nil
+}
+
// IDToken is the JSON representation of an MSAL id token for encoding to storage.
type IDToken struct {
	HomeAccountID     string `json:"home_account_id,omitempty"`
	Environment       string `json:"environment,omitempty"`
	Realm             string `json:"realm,omitempty"`
	CredentialType    string `json:"credential_type,omitempty"`
	ClientID          string `json:"client_id,omitempty"`
	Secret            string `json:"secret,omitempty"`
	UserAssertionHash string `json:"user_assertion_hash,omitempty"`
	AdditionalFields  map[string]interface{}
}

// IsZero determines if IDToken is the zero value. A non-nil but empty map or
// slice field still counts as zero.
func (i IDToken) IsZero() bool {
	v := reflect.ValueOf(i)
	for idx := 0; idx < v.NumField(); idx++ {
		f := v.Field(idx)
		if f.IsZero() {
			continue
		}
		// Allocated-but-empty collections are considered zero here.
		if k := f.Kind(); (k == reflect.Map || k == reflect.Slice) && f.Len() == 0 {
			continue
		}
		return false
	}
	return true
}

// NewIDToken is the constructor for IDToken.
func NewIDToken(homeID, env, realm, clientID, idToken string) IDToken {
	return IDToken{
		CredentialType: "IDToken",
		HomeAccountID:  homeID,
		Environment:    env,
		Realm:          realm,
		ClientID:       clientID,
		Secret:         idToken,
	}
}
+
+// Key outputs the key that can be used to uniquely look up this entry in a map.
+func (id IDToken) Key() string {
+ key := strings.Join(
+ []string{id.HomeAccountID, id.Environment, id.CredentialType, id.ClientID, id.Realm},
+ shared.CacheKeySeparator,
+ )
+ return strings.ToLower(key)
+}
+
// AppMetaData is the JSON representation of application metadata for encoding to storage.
type AppMetaData struct {
	FamilyID    string `json:"family_id,omitempty"`
	ClientID    string `json:"client_id,omitempty"`
	Environment string `json:"environment,omitempty"`

	AdditionalFields map[string]interface{}
}

// NewAppMetaData is the constructor for AppMetaData.
func NewAppMetaData(familyID, clientID, environment string) AppMetaData {
	return AppMetaData{FamilyID: familyID, ClientID: clientID, Environment: environment}
}
+
+// Key outputs the key that can be used to uniquely look up this entry in a map.
+func (a AppMetaData) Key() string {
+ key := strings.Join(
+ []string{"AppMetaData", a.Environment, a.ClientID},
+ shared.CacheKeySeparator,
+ )
+ return strings.ToLower(key)
+}
@@ -0,0 +1,442 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package storage
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/shared"
+)
+
// PartitionedManager is a partitioned in-memory cache of access tokens, accounts and meta data.
// Entries are partitioned by a per-request key (derived from the user
// assertion hash in the read/write paths), keeping different callers'
// artifacts isolated.
type PartitionedManager struct {
	contract   *InMemoryContract
	contractMu sync.RWMutex
	requests   aadInstanceDiscoveryer // *oauth.Token

	// aadCacheMu guards aadCache, the memoized AAD instance discovery results
	// keyed by authority host (including aliases).
	aadCacheMu sync.RWMutex
	aadCache   map[string]authority.InstanceDiscoveryMetadata
}
+
+// NewPartitionedManager is the constructor for PartitionedManager.
+func NewPartitionedManager(requests *oauth.Client) *PartitionedManager {
+ m := &PartitionedManager{requests: requests, aadCache: make(map[string]authority.InstanceDiscoveryMetadata)}
+ m.contract = NewInMemoryContract()
+ return m
+}
+
// Read reads a storage token from the cache if it exists.
// Individual lookup misses are non-fatal: each artifact (access token, ID
// token, refresh token, account) is populated independently, so e.g. a
// missing ID token does not prevent returning a cached refresh token.
func (m *PartitionedManager) Read(ctx context.Context, authParameters authority.AuthParams) (TokenResponse, error) {
	tr := TokenResponse{}
	realm := authParameters.AuthorityInfo.Tenant
	clientID := authParameters.ClientID
	scopes := authParameters.Scopes
	authnSchemeKeyID := authParameters.AuthnScheme.KeyID()
	tokenType := authParameters.AuthnScheme.AccessTokenType()

	// fetch metadata if instanceDiscovery is enabled
	aliases := []string{authParameters.AuthorityInfo.Host}
	if !authParameters.AuthorityInfo.InstanceDiscoveryDisabled {
		metadata, err := m.getMetadataEntry(ctx, authParameters.AuthorityInfo)
		if err != nil {
			return TokenResponse{}, err
		}
		aliases = metadata.Aliases
	}

	// The partition key for this request is the user assertion hash.
	userAssertionHash := authParameters.AssertionHash()
	partitionKeyFromRequest := userAssertionHash

	// errors returned by read* methods indicate a cache miss and are therefore non-fatal. We continue populating
	// TokenResponse fields so that e.g. lack of an ID token doesn't prevent the caller from receiving a refresh token.
	accessToken, err := m.readAccessToken(aliases, realm, clientID, userAssertionHash, scopes, partitionKeyFromRequest, tokenType, authnSchemeKeyID)
	if err == nil {
		tr.AccessToken = accessToken
	}
	idToken, err := m.readIDToken(aliases, realm, clientID, userAssertionHash, getPartitionKeyIDTokenRead(accessToken))
	if err == nil {
		tr.IDToken = idToken
	}

	if appMetadata, err := m.readAppMetaData(aliases, clientID); err == nil {
		// we need the family ID to identify the correct refresh token, if any
		familyID := appMetadata.FamilyID
		refreshToken, err := m.readRefreshToken(aliases, familyID, clientID, userAssertionHash, partitionKeyFromRequest)
		if err == nil {
			tr.RefreshToken = refreshToken
		}
	}

	account, err := m.readAccount(aliases, realm, userAssertionHash, idToken.HomeAccountID)
	if err == nil {
		tr.Account = account
	}
	return tr, nil
}
+
// Write writes a token response to the cache and returns the account information the token is stored with.
// Each artifact (refresh token, access token, ID token, account, app metadata)
// is written independently; OBO requests additionally stamp the user assertion
// hash so entries remain scoped to the originating assertion.
func (m *PartitionedManager) Write(authParameters authority.AuthParams, tokenResponse accesstokens.TokenResponse) (shared.Account, error) {
	authParameters.HomeAccountID = tokenResponse.HomeAccountID()
	homeAccountID := authParameters.HomeAccountID
	environment := authParameters.AuthorityInfo.Host
	realm := authParameters.AuthorityInfo.Tenant
	clientID := authParameters.ClientID
	target := strings.Join(tokenResponse.GrantedScopes.Slice, scopeSeparator)
	userAssertionHash := authParameters.AssertionHash()
	cachedAt := time.Now()
	authnSchemeKeyID := authParameters.AuthnScheme.KeyID()
	var account shared.Account

	if len(tokenResponse.RefreshToken) > 0 {
		refreshToken := accesstokens.NewRefreshToken(homeAccountID, environment, clientID, tokenResponse.RefreshToken, tokenResponse.FamilyID)
		if authParameters.AuthorizationType == authority.ATOnBehalfOf {
			refreshToken.UserAssertionHash = userAssertionHash
		}
		if err := m.writeRefreshToken(refreshToken, getPartitionKeyRefreshToken(refreshToken)); err != nil {
			return account, err
		}
	}

	if len(tokenResponse.AccessToken) > 0 {
		accessToken := NewAccessToken(
			homeAccountID,
			environment,
			realm,
			clientID,
			cachedAt,
			tokenResponse.ExpiresOn.T,
			tokenResponse.ExtExpiresOn.T,
			target,
			tokenResponse.AccessToken,
			tokenResponse.TokenType,
			authnSchemeKeyID,
		)
		if authParameters.AuthorizationType == authority.ATOnBehalfOf {
			accessToken.UserAssertionHash = userAssertionHash // get Hash method on this
		}

		// Since we have a valid access token, cache it before moving on.
		if err := accessToken.Validate(); err == nil {
			if err := m.writeAccessToken(accessToken, getPartitionKeyAccessToken(accessToken)); err != nil {
				return account, err
			}
		} else {
			return shared.Account{}, err
		}
	}

	idTokenJwt := tokenResponse.IDToken
	if !idTokenJwt.IsZero() {
		idToken := NewIDToken(homeAccountID, environment, realm, clientID, idTokenJwt.RawToken)
		if authParameters.AuthorizationType == authority.ATOnBehalfOf {
			idToken.UserAssertionHash = userAssertionHash
		}
		if err := m.writeIDToken(idToken, getPartitionKeyIDToken(idToken)); err != nil {
			return shared.Account{}, err
		}

		localAccountID := idTokenJwt.LocalAccountID()
		authorityType := authParameters.AuthorityInfo.AuthorityType

		// Prefer the preferred_username claim; fall back to the UPN.
		preferredUsername := idTokenJwt.UPN
		if idTokenJwt.PreferredUsername != "" {
			preferredUsername = idTokenJwt.PreferredUsername
		}

		account = shared.NewAccount(
			homeAccountID,
			environment,
			realm,
			localAccountID,
			authorityType,
			preferredUsername,
		)
		if authParameters.AuthorizationType == authority.ATOnBehalfOf {
			account.UserAssertionHash = userAssertionHash
		}
		if err := m.writeAccount(account, getPartitionKeyAccount(account)); err != nil {
			return shared.Account{}, err
		}
	}

	AppMetaData := NewAppMetaData(tokenResponse.FamilyID, clientID, environment)

	if err := m.writeAppMetaData(AppMetaData); err != nil {
		return shared.Account{}, err
	}
	return account, nil
}
+
+func (m *PartitionedManager) getMetadataEntry(ctx context.Context, authorityInfo authority.Info) (authority.InstanceDiscoveryMetadata, error) {
+ md, err := m.aadMetadataFromCache(ctx, authorityInfo)
+ if err != nil {
+ // not in the cache, retrieve it
+ md, err = m.aadMetadata(ctx, authorityInfo)
+ }
+ return md, err
+}
+
+func (m *PartitionedManager) aadMetadataFromCache(ctx context.Context, authorityInfo authority.Info) (authority.InstanceDiscoveryMetadata, error) {
+ m.aadCacheMu.RLock()
+ defer m.aadCacheMu.RUnlock()
+ metadata, ok := m.aadCache[authorityInfo.Host]
+ if ok {
+ return metadata, nil
+ }
+ return metadata, errors.New("not found")
+}
+
// aadMetadata performs AAD instance discovery for the authority and refreshes
// the in-memory metadata cache with the results, returning the entry for the
// requested host.
func (m *PartitionedManager) aadMetadata(ctx context.Context, authorityInfo authority.Info) (authority.InstanceDiscoveryMetadata, error) {
	discoveryResponse, err := m.requests.AADInstanceDiscovery(ctx, authorityInfo)
	if err != nil {
		return authority.InstanceDiscoveryMetadata{}, err
	}

	m.aadCacheMu.Lock()
	defer m.aadCacheMu.Unlock()

	// Index every alias so future lookups hit regardless of which alias was used.
	for _, metadataEntry := range discoveryResponse.Metadata {
		for _, aliasedAuthority := range metadataEntry.Aliases {
			m.aadCache[aliasedAuthority] = metadataEntry
		}
	}
	// Ensure the requested host always resolves, even when discovery didn't list it.
	if _, ok := m.aadCache[authorityInfo.Host]; !ok {
		m.aadCache[authorityInfo.Host] = authority.InstanceDiscoveryMetadata{
			PreferredNetwork: authorityInfo.Host,
			PreferredCache:   authorityInfo.Host,
		}
	}
	return m.aadCache[authorityInfo.Host], nil
}
+
+func (m *PartitionedManager) readAccessToken(envAliases []string, realm, clientID, userAssertionHash string, scopes []string, partitionKey, tokenType, authnSchemeKeyID string) (AccessToken, error) {
+ m.contractMu.RLock()
+ defer m.contractMu.RUnlock()
+ if accessTokens, ok := m.contract.AccessTokensPartition[partitionKey]; ok {
+ // TODO: linear search (over a map no less) is slow for a large number (thousands) of tokens.
+ // this shows up as the dominating node in a profile. for real-world scenarios this likely isn't
+ // an issue, however if it does become a problem then we know where to look.
+ for _, at := range accessTokens {
+ if at.Realm == realm && at.ClientID == clientID && at.UserAssertionHash == userAssertionHash {
+ if at.TokenType == tokenType && at.AuthnSchemeKeyID == authnSchemeKeyID {
+ if checkAlias(at.Environment, envAliases) {
+ if isMatchingScopes(scopes, at.Scopes) {
+ return at, nil
+ }
+ }
+ }
+ }
+ }
+ }
+ return AccessToken{}, fmt.Errorf("access token not found")
+}
+
+func (m *PartitionedManager) writeAccessToken(accessToken AccessToken, partitionKey string) error {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ key := accessToken.Key()
+ if m.contract.AccessTokensPartition[partitionKey] == nil {
+ m.contract.AccessTokensPartition[partitionKey] = make(map[string]AccessToken)
+ }
+ m.contract.AccessTokensPartition[partitionKey][key] = accessToken
+ return nil
+}
+
+func matchFamilyRefreshTokenObo(rt accesstokens.RefreshToken, userAssertionHash string, envAliases []string) bool {
+ return rt.UserAssertionHash == userAssertionHash && checkAlias(rt.Environment, envAliases) && rt.FamilyID != ""
+}
+
+func matchClientIDRefreshTokenObo(rt accesstokens.RefreshToken, userAssertionHash string, envAliases []string, clientID string) bool {
+ return rt.UserAssertionHash == userAssertionHash && checkAlias(rt.Environment, envAliases) && rt.ClientID == clientID
+}
+
// readRefreshToken returns a refresh token from the given partition matching
// the user assertion hash and an environment alias. When familyID is set the
// family (FOCI) token is preferred; otherwise the client-specific token is
// tried first. Matcher order therefore decides which token wins when both
// kinds are cached.
func (m *PartitionedManager) readRefreshToken(envAliases []string, familyID, clientID, userAssertionHash, partitionKey string) (accesstokens.RefreshToken, error) {
	byFamily := func(rt accesstokens.RefreshToken) bool {
		return matchFamilyRefreshTokenObo(rt, userAssertionHash, envAliases)
	}
	byClient := func(rt accesstokens.RefreshToken) bool {
		return matchClientIDRefreshTokenObo(rt, userAssertionHash, envAliases, clientID)
	}

	var matchers []func(rt accesstokens.RefreshToken) bool
	if familyID == "" {
		matchers = []func(rt accesstokens.RefreshToken) bool{
			byClient, byFamily,
		}
	} else {
		matchers = []func(rt accesstokens.RefreshToken) bool{
			byFamily, byClient,
		}
	}

	// TODO(keegan): All the tests here pass, but Bogdan says this is
	// more complicated. I'm opening an issue for this to have him
	// review the tests and suggest tests that would break this so
	// we can re-write against good tests. His comments as follow:
	// The algorithm is a bit more complex than this, I assume there are some tests covering everything. I would keep the order as is.
	// The algorithm is:
	// If application is NOT part of the family, search by client_ID
	// If app is part of the family or if we DO NOT KNOW if it's part of the family, search by family ID, then by client_id (we will know if an app is part of the family after the first token response).
	// https://github.com/AzureAD/microsoft-authentication-library-for-dotnet/blob/311fe8b16e7c293462806f397e189a6aa1159769/src/client/Microsoft.Identity.Client/Internal/Requests/Silent/CacheSilentStrategy.cs#L95
	m.contractMu.RLock()
	defer m.contractMu.RUnlock()
	for _, matcher := range matchers {
		for _, rt := range m.contract.RefreshTokensPartition[partitionKey] {
			if matcher(rt) {
				return rt, nil
			}
		}
	}

	return accesstokens.RefreshToken{}, fmt.Errorf("refresh token not found")
}
+
+func (m *PartitionedManager) writeRefreshToken(refreshToken accesstokens.RefreshToken, partitionKey string) error {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ key := refreshToken.Key()
+ if m.contract.AccessTokensPartition[partitionKey] == nil {
+ m.contract.RefreshTokensPartition[partitionKey] = make(map[string]accesstokens.RefreshToken)
+ }
+ m.contract.RefreshTokensPartition[partitionKey][key] = refreshToken
+ return nil
+}
+
+func (m *PartitionedManager) readIDToken(envAliases []string, realm, clientID, userAssertionHash, partitionKey string) (IDToken, error) {
+ m.contractMu.RLock()
+ defer m.contractMu.RUnlock()
+ for _, idt := range m.contract.IDTokensPartition[partitionKey] {
+ if idt.Realm == realm && idt.ClientID == clientID && idt.UserAssertionHash == userAssertionHash {
+ if checkAlias(idt.Environment, envAliases) {
+ return idt, nil
+ }
+ }
+ }
+ return IDToken{}, fmt.Errorf("token not found")
+}
+
+func (m *PartitionedManager) writeIDToken(idToken IDToken, partitionKey string) error {
+ key := idToken.Key()
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ if m.contract.IDTokensPartition[partitionKey] == nil {
+ m.contract.IDTokensPartition[partitionKey] = make(map[string]IDToken)
+ }
+ m.contract.IDTokensPartition[partitionKey][key] = idToken
+ return nil
+}
+
+func (m *PartitionedManager) readAccount(envAliases []string, realm, UserAssertionHash, partitionKey string) (shared.Account, error) {
+ m.contractMu.RLock()
+ defer m.contractMu.RUnlock()
+
+ // You might ask why, if cache.Accounts is a map, we would loop through all of these instead of using a key.
+ // We only use a map because the storage contract shared between all language implementations says use a map.
+ // We can't change that. The other is because the keys are made using a specific "env", but here we are allowing
+ // a match in multiple envs (envAlias). That means we either need to hash each possible keyand do the lookup
+ // or just statically check. Since the design is to have a storage.Manager per user, the amount of keys stored
+ // is really low (say 2). Each hash is more expensive than the entire iteration.
+ for _, acc := range m.contract.AccountsPartition[partitionKey] {
+ if checkAlias(acc.Environment, envAliases) && acc.UserAssertionHash == UserAssertionHash && acc.Realm == realm {
+ return acc, nil
+ }
+ }
+ return shared.Account{}, fmt.Errorf("account not found")
+}
+
+func (m *PartitionedManager) writeAccount(account shared.Account, partitionKey string) error {
+ key := account.Key()
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ if m.contract.AccountsPartition[partitionKey] == nil {
+ m.contract.AccountsPartition[partitionKey] = make(map[string]shared.Account)
+ }
+ m.contract.AccountsPartition[partitionKey][key] = account
+ return nil
+}
+
+func (m *PartitionedManager) readAppMetaData(envAliases []string, clientID string) (AppMetaData, error) {
+ m.contractMu.RLock()
+ defer m.contractMu.RUnlock()
+
+ for _, app := range m.contract.AppMetaData {
+ if checkAlias(app.Environment, envAliases) && app.ClientID == clientID {
+ return app, nil
+ }
+ }
+ return AppMetaData{}, fmt.Errorf("not found")
+}
+
+func (m *PartitionedManager) writeAppMetaData(AppMetaData AppMetaData) error {
+ key := AppMetaData.Key()
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ m.contract.AppMetaData[key] = AppMetaData
+ return nil
+}
+
+// update updates the internal cache object. This is for use in tests, other uses are not
+// supported.
+func (m *PartitionedManager) update(cache *InMemoryContract) {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ m.contract = cache
+}
+
+// Marshal implements cache.Marshaler.
+func (m *PartitionedManager) Marshal() ([]byte, error) {
+ return json.Marshal(m.contract)
+}
+
+// Unmarshal implements cache.Unmarshaler.
+func (m *PartitionedManager) Unmarshal(b []byte) error {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+
+ contract := NewInMemoryContract()
+
+ err := json.Unmarshal(b, contract)
+ if err != nil {
+ return err
+ }
+
+ m.contract = contract
+
+ return nil
+}
+
+func getPartitionKeyAccessToken(item AccessToken) string {
+ if item.UserAssertionHash != "" {
+ return item.UserAssertionHash
+ }
+ return item.HomeAccountID
+}
+
+func getPartitionKeyRefreshToken(item accesstokens.RefreshToken) string {
+ if item.UserAssertionHash != "" {
+ return item.UserAssertionHash
+ }
+ return item.HomeAccountID
+}
+
// getPartitionKeyIDToken chooses the cache partition for an ID token, which
// is always keyed by home account ID.
func getPartitionKeyIDToken(item IDToken) string {
	return item.HomeAccountID
}
+
// getPartitionKeyAccount chooses the cache partition for an account, which is
// always keyed by home account ID.
func getPartitionKeyAccount(item shared.Account) string {
	return item.HomeAccountID
}
+
// getPartitionKeyIDTokenRead derives the ID-token partition key from an
// already-read access token, using its home account ID. NOTE(review): unlike
// getPartitionKeyAccessToken this ignores UserAssertionHash — presumably
// because ID tokens are partitioned per account; confirm the asymmetry is
// intentional.
func getPartitionKeyIDTokenRead(item AccessToken) string {
	return item.HomeAccountID
}
@@ -0,0 +1,583 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+// Package storage holds all cached token information for MSAL. This storage can be
+// augmented with third-party extensions to provide persistent storage. In that case,
+// reads and writes in upper packages will call Marshal() to take the entire in-memory
+// representation and write it to storage and Unmarshal() to update the entire in-memory
+// storage with what was in the persistent storage. The persistent storage can only be
+// accessed in this way because multiple MSAL clients written in multiple languages can
+// access the same storage and must adhere to the same method that was defined
+// previously.
+package storage
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/shared"
+)
+
// aadInstanceDiscoveryer performs AAD instance discovery for an authority.
// It exists as an interface so tests can supply a fake; in production it is
// implemented by ops/authority.Client.
type aadInstanceDiscoveryer interface {
	AADInstanceDiscovery(ctx context.Context, authorityInfo authority.Info) (authority.InstanceDiscoveryResponse, error)
}
+
// TokenResponse mimics a token response that was pulled from the cache.
// Any item that was not found in the cache is left as its zero value.
type TokenResponse struct {
	RefreshToken accesstokens.RefreshToken
	IDToken      IDToken // *Credential
	AccessToken  AccessToken
	Account      shared.Account
}
+
// Manager is an in-memory cache of access tokens, accounts and meta data. This data is
// updated on read/write calls. Unmarshal() replaces all data stored here with whatever
// was given to it on each call.
type Manager struct {
	contract   *Contract              // the serializable cache contents, guarded by contractMu
	contractMu sync.RWMutex           // guards contract
	requests   aadInstanceDiscoveryer // *oauth.Token

	aadCacheMu sync.RWMutex // guards aadCache
	aadCache   map[string]authority.InstanceDiscoveryMetadata // discovery results keyed by host/alias
}
+
+// New is the constructor for Manager.
+func New(requests *oauth.Client) *Manager {
+ m := &Manager{requests: requests, aadCache: make(map[string]authority.InstanceDiscoveryMetadata)}
+ m.contract = NewContract()
+ return m
+}
+
// checkAlias reports whether alias appears in aliases.
func checkAlias(alias string, aliases []string) bool {
	for i := range aliases {
		if aliases[i] == alias {
			return true
		}
	}
	return false
}
+
+func isMatchingScopes(scopesOne []string, scopesTwo string) bool {
+ newScopesTwo := strings.Split(scopesTwo, scopeSeparator)
+ scopeCounter := 0
+ for _, scope := range scopesOne {
+ for _, otherScope := range newScopesTwo {
+ if strings.EqualFold(scope, otherScope) {
+ scopeCounter++
+ continue
+ }
+ }
+ }
+ return scopeCounter == len(scopesOne)
+}
+
// needsUpgrade reports whether the given key follows the v1.0 cache schema,
// i.e. it contains at least one uppercase ASCII letter (v1.1+ keys are all
// lowercase).
func needsUpgrade(key string) bool {
	return strings.IndexFunc(key, func(r rune) bool {
		return 'A' <= r && r <= 'Z'
	}) >= 0
}
+
// upgrade migrates a v1.0 cache item to the v1.1+ schema: an equivalent
// lowercase-keyed entry is written (unless one already exists, in which case
// that entry is preferred) and the v1.0 entry is deleted. It returns the
// surviving item. Callers must hold an exclusive lock on m.
func upgrade[T any](m map[string]T, k string) T {
	lowered := strings.ToLower(k)
	v, ok := m[k]
	if !ok {
		// Another goroutine finished this upgrade while we waited for the
		// write lock; return whatever it left behind.
		return m[lowered]
	}
	if preferred, exists := m[lowered]; exists {
		// Prefer the v1.1+ item: it was written by a newer module version and
		// is more likely still valid, since only v1.0-or-earlier code would
		// refresh the v1.0 entry.
		v = preferred
	} else {
		m[lowered] = v
	}
	delete(m, k)
	return v
}
+
// Read reads a storage token from the cache if it exists. Cache misses for
// individual items are not errors: missing items are simply left as zero
// values in the returned TokenResponse. The only error case is a failed
// instance discovery lookup.
func (m *Manager) Read(ctx context.Context, authParameters authority.AuthParams) (TokenResponse, error) {
	tr := TokenResponse{}
	homeAccountID := authParameters.HomeAccountID
	realm := authParameters.AuthorityInfo.Tenant
	clientID := authParameters.ClientID
	scopes := authParameters.Scopes
	authnSchemeKeyID := authParameters.AuthnScheme.KeyID()
	tokenType := authParameters.AuthnScheme.AccessTokenType()

	// fetch metadata if instanceDiscovery is enabled; the aliases widen the
	// environment match beyond the literal authority host
	aliases := []string{authParameters.AuthorityInfo.Host}
	if !authParameters.AuthorityInfo.InstanceDiscoveryDisabled {
		metadata, err := m.getMetadataEntry(ctx, authParameters.AuthorityInfo)
		if err != nil {
			return TokenResponse{}, err
		}
		aliases = metadata.Aliases
	}

	// a miss yields a zero AccessToken, which callers treat as "not found"
	accessToken := m.readAccessToken(homeAccountID, aliases, realm, clientID, scopes, tokenType, authnSchemeKeyID)
	tr.AccessToken = accessToken

	if homeAccountID == "" {
		// caller didn't specify a user, so there's no reason to search for an ID or refresh token
		return tr, nil
	}
	// errors returned by read* methods indicate a cache miss and are therefore non-fatal. We continue populating
	// TokenResponse fields so that e.g. lack of an ID token doesn't prevent the caller from receiving a refresh token.
	idToken, err := m.readIDToken(homeAccountID, aliases, realm, clientID)
	if err == nil {
		tr.IDToken = idToken
	}

	if appMetadata, err := m.readAppMetaData(aliases, clientID); err == nil {
		// we need the family ID to identify the correct refresh token, if any
		familyID := appMetadata.FamilyID
		refreshToken, err := m.readRefreshToken(homeAccountID, aliases, familyID, clientID)
		if err == nil {
			tr.RefreshToken = refreshToken
		}
	}

	account, err := m.readAccount(homeAccountID, aliases, realm)
	if err == nil {
		tr.Account = account
	}
	return tr, nil
}
+
// scopeSeparator delimits individual scopes in the cached target string.
const scopeSeparator = " "
+
// Write writes a token response to the cache and returns the account information the token is stored with.
// Each credential present in tokenResponse (refresh token, access token, ID
// token) is cached independently; missing credentials are skipped rather than
// treated as errors. App metadata is always written so family (FOCI)
// membership is remembered for future reads.
func (m *Manager) Write(authParameters authority.AuthParams, tokenResponse accesstokens.TokenResponse) (shared.Account, error) {
	homeAccountID := tokenResponse.HomeAccountID()
	environment := authParameters.AuthorityInfo.Host
	realm := authParameters.AuthorityInfo.Tenant
	clientID := authParameters.ClientID
	target := strings.Join(tokenResponse.GrantedScopes.Slice, scopeSeparator)
	cachedAt := time.Now()
	authnSchemeKeyID := authParameters.AuthnScheme.KeyID()

	var account shared.Account

	if len(tokenResponse.RefreshToken) > 0 {
		refreshToken := accesstokens.NewRefreshToken(homeAccountID, environment, clientID, tokenResponse.RefreshToken, tokenResponse.FamilyID)
		if err := m.writeRefreshToken(refreshToken); err != nil {
			return account, err
		}
	}

	if len(tokenResponse.AccessToken) > 0 {
		accessToken := NewAccessToken(
			homeAccountID,
			environment,
			realm,
			clientID,
			cachedAt,
			tokenResponse.ExpiresOn.T,
			tokenResponse.ExtExpiresOn.T,
			target,
			tokenResponse.AccessToken,
			tokenResponse.TokenType,
			authnSchemeKeyID,
		)

		// Since we have a valid access token, cache it before moving on.
		// An invalid token is silently dropped rather than failing the write.
		if err := accessToken.Validate(); err == nil {
			if err := m.writeAccessToken(accessToken); err != nil {
				return account, err
			}
		}
	}

	// The account is only derivable from the ID token's claims, so it is
	// built and cached here, inside the ID-token branch.
	idTokenJwt := tokenResponse.IDToken
	if !idTokenJwt.IsZero() {
		idToken := NewIDToken(homeAccountID, environment, realm, clientID, idTokenJwt.RawToken)
		if err := m.writeIDToken(idToken); err != nil {
			return shared.Account{}, err
		}

		localAccountID := idTokenJwt.LocalAccountID()
		authorityType := authParameters.AuthorityInfo.AuthorityType

		// prefer preferred_username, falling back to the legacy UPN claim
		preferredUsername := idTokenJwt.UPN
		if idTokenJwt.PreferredUsername != "" {
			preferredUsername = idTokenJwt.PreferredUsername
		}

		account = shared.NewAccount(
			homeAccountID,
			environment,
			realm,
			localAccountID,
			authorityType,
			preferredUsername,
		)
		if err := m.writeAccount(account); err != nil {
			return shared.Account{}, err
		}
	}

	AppMetaData := NewAppMetaData(tokenResponse.FamilyID, clientID, environment)

	if err := m.writeAppMetaData(AppMetaData); err != nil {
		return shared.Account{}, err
	}
	return account, nil
}
+
+func (m *Manager) getMetadataEntry(ctx context.Context, authorityInfo authority.Info) (authority.InstanceDiscoveryMetadata, error) {
+ md, err := m.aadMetadataFromCache(ctx, authorityInfo)
+ if err != nil {
+ // not in the cache, retrieve it
+ md, err = m.aadMetadata(ctx, authorityInfo)
+ }
+ return md, err
+}
+
+func (m *Manager) aadMetadataFromCache(ctx context.Context, authorityInfo authority.Info) (authority.InstanceDiscoveryMetadata, error) {
+ m.aadCacheMu.RLock()
+ defer m.aadCacheMu.RUnlock()
+ metadata, ok := m.aadCache[authorityInfo.Host]
+ if ok {
+ return metadata, nil
+ }
+ return metadata, errors.New("not found")
+}
+
+func (m *Manager) aadMetadata(ctx context.Context, authorityInfo authority.Info) (authority.InstanceDiscoveryMetadata, error) {
+ m.aadCacheMu.Lock()
+ defer m.aadCacheMu.Unlock()
+ discoveryResponse, err := m.requests.AADInstanceDiscovery(ctx, authorityInfo)
+ if err != nil {
+ return authority.InstanceDiscoveryMetadata{}, err
+ }
+
+ for _, metadataEntry := range discoveryResponse.Metadata {
+ for _, aliasedAuthority := range metadataEntry.Aliases {
+ m.aadCache[aliasedAuthority] = metadataEntry
+ }
+ }
+ if _, ok := m.aadCache[authorityInfo.Host]; !ok {
+ m.aadCache[authorityInfo.Host] = authority.InstanceDiscoveryMetadata{
+ PreferredNetwork: authorityInfo.Host,
+ PreferredCache: authorityInfo.Host,
+ }
+ }
+ return m.aadCache[authorityInfo.Host], nil
+}
+
// readAccessToken returns the cached access token matching the account,
// realm, client, environment aliases, scopes and token type. A miss is not an
// error: the zero AccessToken is returned and callers treat it as "not
// found". Matching entries stored under a v1.0-schema key are re-keyed to the
// v1.1+ schema on the way out, which requires dropping the read lock and
// taking the write lock.
func (m *Manager) readAccessToken(homeID string, envAliases []string, realm, clientID string, scopes []string, tokenType, authnSchemeKeyID string) AccessToken {
	m.contractMu.RLock()
	// TODO: linear search (over a map no less) is slow for a large number (thousands) of tokens.
	// this shows up as the dominating node in a profile. for real-world scenarios this likely isn't
	// an issue, however if it does become a problem then we know where to look.
	for k, at := range m.contract.AccessTokens {
		if at.HomeAccountID == homeID && at.Realm == realm && at.ClientID == clientID {
			// accept an exact token-type/key-ID match, or a legacy entry with
			// no recorded token type when a Bearer (or unspecified) token is wanted
			if (strings.EqualFold(at.TokenType, tokenType) && at.AuthnSchemeKeyID == authnSchemeKeyID) || (at.TokenType == "" && (tokenType == "" || tokenType == "Bearer")) {
				if checkAlias(at.Environment, envAliases) && isMatchingScopes(scopes, at.Scopes) {
					// release the read lock before possibly taking the write
					// lock for the schema upgrade; upgrade() tolerates the
					// gap between the two
					m.contractMu.RUnlock()
					if needsUpgrade(k) {
						m.contractMu.Lock()
						defer m.contractMu.Unlock()
						at = upgrade(m.contract.AccessTokens, k)
					}
					return at
				}
			}
		}
	}
	m.contractMu.RUnlock()
	return AccessToken{}
}
+
+func (m *Manager) writeAccessToken(accessToken AccessToken) error {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ key := accessToken.Key()
+ m.contract.AccessTokens[key] = accessToken
+ return nil
+}
+
// readRefreshToken returns a cached refresh token for the account in one of
// the environment aliases. When familyID is set the family (FOCI) token is
// preferred; otherwise the client-specific token is tried first. A matching
// entry stored under a v1.0-schema key is re-keyed to the v1.1+ schema on the
// way out, which requires swapping the read lock for the write lock.
func (m *Manager) readRefreshToken(homeID string, envAliases []string, familyID, clientID string) (accesstokens.RefreshToken, error) {
	byFamily := func(rt accesstokens.RefreshToken) bool {
		return matchFamilyRefreshToken(rt, homeID, envAliases)
	}
	byClient := func(rt accesstokens.RefreshToken) bool {
		return matchClientIDRefreshToken(rt, homeID, envAliases, clientID)
	}

	// matcher order decides which kind of token wins when both are cached
	var matchers []func(rt accesstokens.RefreshToken) bool
	if familyID == "" {
		matchers = []func(rt accesstokens.RefreshToken) bool{
			byClient, byFamily,
		}
	} else {
		matchers = []func(rt accesstokens.RefreshToken) bool{
			byFamily, byClient,
		}
	}

	// TODO(keegan): All the tests here pass, but Bogdan says this is
	// more complicated. I'm opening an issue for this to have him
	// review the tests and suggest tests that would break this so
	// we can re-write against good tests. His comments as follow:
	// The algorithm is a bit more complex than this, I assume there are some tests covering everything. I would keep the order as is.
	// The algorithm is:
	// If application is NOT part of the family, search by client_ID
	// If app is part of the family or if we DO NOT KNOW if it's part of the family, search by family ID, then by client_id (we will know if an app is part of the family after the first token response).
	// https://github.com/AzureAD/microsoft-authentication-library-for-dotnet/blob/311fe8b16e7c293462806f397e189a6aa1159769/src/client/Microsoft.Identity.Client/Internal/Requests/Silent/CacheSilentStrategy.cs#L95
	m.contractMu.RLock()
	for _, matcher := range matchers {
		for k, rt := range m.contract.RefreshTokens {
			if matcher(rt) {
				m.contractMu.RUnlock()
				if needsUpgrade(k) {
					// v1.0 schema key: re-key under the write lock
					m.contractMu.Lock()
					defer m.contractMu.Unlock()
					rt = upgrade(m.contract.RefreshTokens, k)
				}
				return rt, nil
			}
		}
	}

	m.contractMu.RUnlock()
	return accesstokens.RefreshToken{}, fmt.Errorf("refresh token not found")
}
+
+func matchFamilyRefreshToken(rt accesstokens.RefreshToken, homeID string, envAliases []string) bool {
+ return rt.HomeAccountID == homeID && checkAlias(rt.Environment, envAliases) && rt.FamilyID != ""
+}
+
+func matchClientIDRefreshToken(rt accesstokens.RefreshToken, homeID string, envAliases []string, clientID string) bool {
+ return rt.HomeAccountID == homeID && checkAlias(rt.Environment, envAliases) && rt.ClientID == clientID
+}
+
+func (m *Manager) writeRefreshToken(refreshToken accesstokens.RefreshToken) error {
+ key := refreshToken.Key()
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ m.contract.RefreshTokens[key] = refreshToken
+ return nil
+}
+
// readIDToken returns the cached ID token matching the account, realm, client
// and one of the environment aliases. A matching entry stored under a
// v1.0-schema key is re-keyed to the v1.1+ schema on the way out, which
// requires swapping the read lock for the write lock.
func (m *Manager) readIDToken(homeID string, envAliases []string, realm, clientID string) (IDToken, error) {
	m.contractMu.RLock()
	for k, idt := range m.contract.IDTokens {
		if idt.HomeAccountID == homeID && idt.Realm == realm && idt.ClientID == clientID {
			if checkAlias(idt.Environment, envAliases) {
				m.contractMu.RUnlock()
				if needsUpgrade(k) {
					// v1.0 schema key: re-key under the write lock
					m.contractMu.Lock()
					defer m.contractMu.Unlock()
					idt = upgrade(m.contract.IDTokens, k)
				}
				return idt, nil
			}
		}
	}
	m.contractMu.RUnlock()
	return IDToken{}, fmt.Errorf("token not found")
}
+
+func (m *Manager) writeIDToken(idToken IDToken) error {
+ key := idToken.Key()
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ m.contract.IDTokens[key] = idToken
+ return nil
+}
+
+func (m *Manager) AllAccounts() []shared.Account {
+ m.contractMu.RLock()
+ defer m.contractMu.RUnlock()
+
+ var accounts []shared.Account
+ for _, v := range m.contract.Accounts {
+ accounts = append(accounts, v)
+ }
+
+ return accounts
+}
+
+func (m *Manager) Account(homeAccountID string) shared.Account {
+ m.contractMu.RLock()
+ defer m.contractMu.RUnlock()
+
+ for _, v := range m.contract.Accounts {
+ if v.HomeAccountID == homeAccountID {
+ return v
+ }
+ }
+
+ return shared.Account{}
+}
+
// readAccount returns the account matching the home account ID, realm and one
// of the environment aliases. A matching entry stored under a v1.0-schema key
// is re-keyed to the v1.1+ schema on the way out, which requires swapping the
// read lock for the write lock.
func (m *Manager) readAccount(homeAccountID string, envAliases []string, realm string) (shared.Account, error) {
	m.contractMu.RLock()

	// You might ask why, if cache.Accounts is a map, we would loop through all of these instead of using a key.
	// We only use a map because the storage contract shared between all language implementations says use a map.
	// We can't change that. The other is because the keys are made using a specific "env", but here we are allowing
	// a match in multiple envs (envAlias). That means we either need to hash each possible key and do the lookup
	// or just statically check. Since the design is to have a storage.Manager per user, the amount of keys stored
	// is really low (say 2). Each hash is more expensive than the entire iteration.
	for k, acc := range m.contract.Accounts {
		if acc.HomeAccountID == homeAccountID && checkAlias(acc.Environment, envAliases) && acc.Realm == realm {
			m.contractMu.RUnlock()
			if needsUpgrade(k) {
				// v1.0 schema key: re-key under the write lock
				m.contractMu.Lock()
				defer m.contractMu.Unlock()
				acc = upgrade(m.contract.Accounts, k)
			}
			return acc, nil
		}
	}
	m.contractMu.RUnlock()
	return shared.Account{}, fmt.Errorf("account not found")
}
+
+func (m *Manager) writeAccount(account shared.Account) error {
+ key := account.Key()
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ m.contract.Accounts[key] = account
+ return nil
+}
+
// readAppMetaData returns app metadata for the client in one of the given
// environments. A matching entry stored under a v1.0-schema key is re-keyed
// to the v1.1+ schema on the way out, which requires swapping the read lock
// for the write lock.
func (m *Manager) readAppMetaData(envAliases []string, clientID string) (AppMetaData, error) {
	m.contractMu.RLock()
	for k, app := range m.contract.AppMetaData {
		if checkAlias(app.Environment, envAliases) && app.ClientID == clientID {
			m.contractMu.RUnlock()
			if needsUpgrade(k) {
				// v1.0 schema key: re-key under the write lock
				m.contractMu.Lock()
				defer m.contractMu.Unlock()
				app = upgrade(m.contract.AppMetaData, k)
			}
			return app, nil
		}
	}
	m.contractMu.RUnlock()
	return AppMetaData{}, fmt.Errorf("not found")
}
+
+func (m *Manager) writeAppMetaData(AppMetaData AppMetaData) error {
+ key := AppMetaData.Key()
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ m.contract.AppMetaData[key] = AppMetaData
+ return nil
+}
+
// RemoveAccount removes all the associated ATs, RTs and IDTs from the cache associated with this account.
// Removal is best-effort: nothing happens for items that aren't cached, and
// the account entry itself is removed last.
func (m *Manager) RemoveAccount(account shared.Account, clientID string) {
	m.removeRefreshTokens(account.HomeAccountID, account.Environment, clientID)
	m.removeAccessTokens(account.HomeAccountID, account.Environment)
	m.removeIDTokens(account.HomeAccountID, account.Environment)
	m.removeAccounts(account.HomeAccountID, account.Environment)
}
+
+func (m *Manager) removeRefreshTokens(homeID string, env string, clientID string) {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ for key, rt := range m.contract.RefreshTokens {
+ // Check for RTs associated with the account.
+ if rt.HomeAccountID == homeID && rt.Environment == env {
+ // Do RT's app ownership check as a precaution, in case family apps
+ // and 3rd-party apps share same token cache, although they should not.
+ if rt.ClientID == clientID || rt.FamilyID != "" {
+ delete(m.contract.RefreshTokens, key)
+ }
+ }
+ }
+}
+
+func (m *Manager) removeAccessTokens(homeID string, env string) {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ for key, at := range m.contract.AccessTokens {
+ // Remove AT's associated with the account
+ if at.HomeAccountID == homeID && at.Environment == env {
+ // # To avoid the complexity of locating sibling family app's AT, we skip AT's app ownership check.
+ // It means ATs for other apps will also be removed, it is OK because:
+ // non-family apps are not supposed to share token cache to begin with;
+ // Even if it happens, we keep other app's RT already, so SSO still works.
+ delete(m.contract.AccessTokens, key)
+ }
+ }
+}
+
+func (m *Manager) removeIDTokens(homeID string, env string) {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ for key, idt := range m.contract.IDTokens {
+ // Remove ID tokens associated with the account.
+ if idt.HomeAccountID == homeID && idt.Environment == env {
+ delete(m.contract.IDTokens, key)
+ }
+ }
+}
+
+func (m *Manager) removeAccounts(homeID string, env string) {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ for key, acc := range m.contract.Accounts {
+ // Remove the specified account.
+ if acc.HomeAccountID == homeID && acc.Environment == env {
+ delete(m.contract.Accounts, key)
+ }
+ }
+}
+
+// update updates the internal cache object. This is for use in tests, other uses are not
+// supported.
+func (m *Manager) update(cache *Contract) {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+ m.contract = cache
+}
+
// Marshal implements cache.Marshaler, serializing the cache contract under a
// read lock so concurrent writes can't tear the snapshot.
func (m *Manager) Marshal() ([]byte, error) {
	m.contractMu.RLock()
	defer m.contractMu.RUnlock()
	return json.Marshal(m.contract)
}
+
+// Unmarshal implements cache.Unmarshaler.
+func (m *Manager) Unmarshal(b []byte) error {
+ m.contractMu.Lock()
+ defer m.contractMu.Unlock()
+
+ contract := NewContract()
+
+ err := json.Unmarshal(b, contract)
+ if err != nil {
+ return err
+ }
+
+ m.contract = contract
+
+ return nil
+}
@@ -0,0 +1,34 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+// package exported contains internal types that are re-exported from a public package
+package exported
+
// AssertionRequestOptions has information required to generate a client assertion.
type AssertionRequestOptions struct {
	// ClientID identifies the application for which an assertion is requested. Used as the assertion's "iss" and "sub" claims.
	ClientID string

	// TokenEndpoint is the intended token endpoint. Used as the assertion's "aud" claim.
	TokenEndpoint string
}
+
// TokenProviderParameters is the authentication parameters passed to token providers.
type TokenProviderParameters struct {
	// Claims contains any additional claims requested for the token.
	Claims string
	// CorrelationID of the authentication request.
	CorrelationID string
	// Scopes requested for the token.
	Scopes []string
	// TenantID identifies the tenant in which to authenticate.
	TenantID string
}
+
// TokenProviderResult is the authentication result returned by custom token providers.
type TokenProviderResult struct {
	// AccessToken is the requested token.
	AccessToken string
	// ExpiresInSeconds is the lifetime of the token in seconds.
	ExpiresInSeconds int
}
@@ -0,0 +1,140 @@
+# JSON Package Design
+Author: John Doak(jdoak@microsoft.com)
+
+## Why?
+
+This project needs a special type of marshal/unmarshal not directly supported
+by the encoding/json package.
+
+The need revolves around a few key wants/needs:
+- unmarshal and marshal structs representing JSON messages
+- fields in the message not in the struct must be maintained when unmarshalled
+- those same fields must be marshalled back when encoded again
+
+The initial version used map[string]interface{} to put in the keys that
+were known and then any other keys were put into a field called AdditionalFields.
+
+This has a few negatives:
+- Dual marshaling/unmarshalling is required
+- Adding a struct field requires manually adding a key by name to be encoded/decoded from the map (which is a loosely coupled construct), which can lead to bugs that aren't detected or have bad side effects
+- Tests can become quickly disconnected if those keys aren't put
+in tests as well. So you think you have support working, but you
+don't. Existing tests were found that didn't test the marshalling output.
+- There is no enforcement that if AdditionalFields is required on one struct, it should be on all containers
+that don't have custom marshal/unmarshal.
+
+This package aims to support our needs by providing custom Marshal()/Unmarshal() functions.
+
+This prevents all the negatives in the initial solution listed above. However, it does add its own negative:
+- Custom encoding/decoding via reflection is messy (as can be seen in encoding/json itself)
+
+Go proverb: Reflection is never clear
+Suggested reading: https://blog.golang.org/laws-of-reflection
+
+## Important design decisions
+
+- We don't want to understand all JSON decoding rules
+- We don't want to deal with all the quoting, commas, etc on decode
+- Need support for json.Marshaler/Unmarshaler, so we can support types like time.Time
+- If struct does not implement json.Unmarshaler, it must have AdditionalFields defined
+- We only support root level objects that are \*struct or struct
+
+To facilitate these goals, we will utilize the json.Encoder and json.Decoder.
+They provide streaming processing (efficient) and return errors on bad JSON.
+
+Support for json.Marshaler/Unmarshaler allows for us to use non-basic types
+that must be specially encoded/decoded (like time.Time objects).
+
+We don't support types that can't custom unmarshal and don't have AdditionalFields
+in order to prevent future devs from forgetting that important field and
+generating bad return values.
+
+Support for root level objects of \*struct or struct simply acknowledges the
+fact that this is designed only for the purposes listed in the Introduction.
+Outside that (like encoding a lone number) should be done with the
+regular json package (as it will not have additional fields).
+
+We don't support a few things on json supported reference types and structs:
+- \*map: no need for pointers to maps
+- \*slice: no need for pointers to slices
+- any further pointers on struct after \*struct
+
+There should never be a need for this in Go.
+
+## Design
+
+## State Machines
+
+This uses state machine designs that are based upon the Rob Pike talk on
+lexers and parsers: https://www.youtube.com/watch?v=HxaD_trXwRE
+
+This is the most common pattern for state machines in Go and
+the model to follow closely when dealing with streaming
+processing of textual data.
+
+Our state machines are based on the type:
+```go
+type stateFn func() (stateFn, error)
+```
+
+The state machine itself is simply a struct that has methods that
+satisfy stateFn.
+
+Our state machines have a few standard calls
+- run(): runs the state machine
+- start(): always the first stateFn to be called
+
+All state machines have the following logic:
+* run() is called
+* start() is called and returns the next stateFn or error
+* stateFn is called
+ - If returned stateFn(next state) is non-nil, call it
+ - If error is non-nil, run() returns the error
+  - If stateFn == nil and err == nil, run() returns err == nil
+
+## Supporting types
+
+Marshalling/Unmarshalling must support(within top level struct):
+- struct
+- \*struct
+- []struct
+- []\*struct
+- []map[string]structContainer
+- [][]structContainer
+
+**Term note:** structContainer == type that has a struct or \*struct inside it
+
+We specifically do not support []interface or map[string]interface
+where the interface value would hold some value with a struct in it.
+
+Those will still marshal/unmarshal, but without support for
+AdditionalFields.
+
+## Marshalling
+
+The marshalling design will be based around a statemachine design.
+
+The basic logic is as follows:
+
+* If struct has custom marshaller, call it and return
+* If struct has field "AdditionalFields", it must be a map[string]interface{}
+* If struct does not have "AdditionalFields", give an error
+* Get struct tag detailing json names to go names, create mapping
+* For each public field name
+ - Write field name out
+ - If field value is a struct, recursively call our state machine
+ - Otherwise, use the json.Encoder to write out the value
+
+## Unmarshalling
+
+The unmarshalling design is also based around a statemachine design. The
+basic logic is as follows:
+
+* If struct has custom unmarshaller, call it
+* If struct has field "AdditionalFields", it must be a map[string]interface{}
+* Get struct tag detailing json names to go names, create mapping
+* For each key found
+ - If key exists,
+ - If value is basic type, extract value into struct field using Decoder
+ - If value is struct type, recursively call statemachine
+ - If key doesn't exist, add it to AdditionalFields if it exists using Decoder
@@ -0,0 +1,184 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+// Package json provides functions for marshalling and unmarshalling types to JSON. These functions are meant to
+// be utilized inside of structs that implement json.Unmarshaler and json.Marshaler interfaces.
+// This package provides the additional functionality of writing fields that are not in the struct when unmarshalling
+// to a field called AdditionalFields if that field exists and is a map[string]interface{}.
+// When marshalling, if the struct has all the same prerequisites, it will use the keys in AdditionalFields as
+// extra fields. This package uses encoding/json underneath.
+package json
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// addField is the name of the struct field used to carry unknown JSON keys.
+const addField = "AdditionalFields"
+
+// Names of the marshalling methods looked up via reflection.
+const (
+	marshalJSON   = "MarshalJSON"
+	unmarshalJSON = "UnmarshalJSON"
+)
+
+// Single-byte JSON delimiters written directly to the output buffer.
+// NOTE(review): leftParen/rightParen actually hold '[' and ']' (brackets);
+// the names are misleading but retained because other blocks reference them.
+var (
+	leftBrace  = []byte("{")[0]
+	rightBrace = []byte("}")[0]
+	comma      = []byte(",")[0]
+	leftParen  = []byte("[")[0]
+	rightParen = []byte("]")[0]
+)
+
+// mapStrInterType is the reflect.Type of map[string]interface{}, used to
+// validate that AdditionalFields has the required type.
+var mapStrInterType = reflect.TypeOf(map[string]interface{}{})
+
+// stateFn defines a state machine function. This will be used in all state
+// machines in this package.
+type stateFn func() (stateFn, error)
+
+// Marshal is used to marshal a type into its JSON representation. It
+// wraps the stdlib calls in order to marshal a struct or *struct so
+// that a field called "AdditionalFields" of type map[string]interface{}
+// with "-" used inside struct tag `json:"-"` can be marshalled as if
+// they were fields within the struct.
+func Marshal(i interface{}) ([]byte, error) {
+	buff := bytes.Buffer{}
+	enc := json.NewEncoder(&buff)
+	// Compact, non-HTML-escaped output.
+	enc.SetEscapeHTML(false)
+	enc.SetIndent("", "")
+
+	v := reflect.ValueOf(i)
+	// NOTE(review): reflect.ValueOf results are not addressable, so CanAddr()
+	// here is always false and this branch likely never fires — confirm intent.
+	if v.Kind() != reflect.Ptr && v.CanAddr() {
+		v = v.Addr()
+	}
+	err := marshalStruct(v, &buff, enc)
+	if err != nil {
+		return nil, err
+	}
+	return buff.Bytes(), nil
+}
+
+// Unmarshal unmarshals a []byte representing JSON into i, which must be a *struct. In addition, if the struct has
+// a field called AdditionalFields of type map[string]interface{}, JSON data representing fields not in the struct
+// will be written as key/value pairs to AdditionalFields.
+func Unmarshal(b []byte, i interface{}) error {
+	// Empty input is a no-op, not an error.
+	if len(b) == 0 {
+		return nil
+	}
+
+	jdec := json.NewDecoder(bytes.NewBuffer(b))
+	// UseNumber preserves numeric precision by decoding numbers as json.Number
+	// rather than float64.
+	jdec.UseNumber()
+	return unmarshalStruct(jdec, i)
+}
+
+// MarshalRaw marshals i into a json.RawMessage. If i cannot be marshalled,
+// this will panic. This is exposed to help test AdditionalField values
+// which are stored as json.RawMessage. Only use in tests.
+func MarshalRaw(i interface{}) json.RawMessage {
+	b, err := json.Marshal(i)
+	if err != nil {
+		panic(err)
+	}
+	return json.RawMessage(b)
+}
+
+// isDelim simply tests to see if a json.Token is a delimeter.
+func isDelim(got json.Token) bool {
+ switch got.(type) {
+ case json.Delim:
+ return true
+ }
+ return false
+}
+
+// delimIs tests got to see if it is want.
+func delimIs(got json.Token, want rune) bool {
+ switch v := got.(type) {
+ case json.Delim:
+ if v == json.Delim(want) {
+ return true
+ }
+ }
+ return false
+}
+
+// hasMarshalJSON will determine if the value or a pointer to this value has
+// the MarshalJSON method.
+func hasMarshalJSON(v reflect.Value) bool {
+	// Check the value as given first.
+	if method := v.MethodByName(marshalJSON); method.Kind() != reflect.Invalid {
+		_, ok := v.Interface().(json.Marshaler)
+		return ok
+	}
+
+	// Otherwise check the other form: deref a pointer, or take the address of
+	// an addressable value (finds pointer-receiver methods).
+	if v.Kind() == reflect.Ptr {
+		v = v.Elem()
+	} else {
+		if !v.CanAddr() {
+			return false
+		}
+		v = v.Addr()
+	}
+
+	if method := v.MethodByName(marshalJSON); method.Kind() != reflect.Invalid {
+		_, ok := v.Interface().(json.Marshaler)
+		return ok
+	}
+	return false
+}
+
+// callMarshalJSON will call MarshalJSON() method on the value or a pointer to this value.
+// This will panic if the method is not defined.
+func callMarshalJSON(v reflect.Value) ([]byte, error) {
+	if method := v.MethodByName(marshalJSON); method.Kind() != reflect.Invalid {
+		marsh := v.Interface().(json.Marshaler)
+		return marsh.MarshalJSON()
+	}
+
+	// Try the other form: deref a pointer, or take the address of an
+	// addressable value, then look up the method again.
+	if v.Kind() == reflect.Ptr {
+		v = v.Elem()
+	} else {
+		if v.CanAddr() {
+			v = v.Addr()
+		}
+	}
+
+	// Fixed: the original looked up unmarshalJSON here, which meant a type with
+	// UnmarshalJSON but no MarshalJSON would hit an invalid json.Marshaler
+	// assertion instead of the intended panic below.
+	if method := v.MethodByName(marshalJSON); method.Kind() != reflect.Invalid {
+		marsh := v.Interface().(json.Marshaler)
+		return marsh.MarshalJSON()
+	}
+
+	panic(fmt.Sprintf("callMarshalJSON called on type %T that does not have MarshalJSON defined", v.Interface()))
+}
+
+// hasUnmarshalJSON will determine if the value or a pointer to this value has
+// the UnmarshalJSON method.
+func hasUnmarshalJSON(v reflect.Value) bool {
+	// You can't unmarshal on a non-pointer type, so look at the address of an
+	// addressable value.
+	if v.Kind() != reflect.Ptr {
+		if !v.CanAddr() {
+			return false
+		}
+		v = v.Addr()
+	}
+
+	if method := v.MethodByName(unmarshalJSON); method.Kind() != reflect.Invalid {
+		_, ok := v.Interface().(json.Unmarshaler)
+		return ok
+	}
+
+	return false
+}
+
+// hasOmitEmpty indicates if the field has instructed us to not output
+// the field if omitempty is set on the tag. tag is the string
+// returned by reflect.StructField.Tag().Get().
+func hasOmitEmpty(tag string) bool {
+	for _, opt := range strings.Split(tag, ",") {
+		if opt == "omitempty" {
+			return true
+		}
+	}
+	return false
+}
@@ -0,0 +1,333 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package json
+
+import (
+ "encoding/json"
+ "fmt"
+ "reflect"
+)
+
+// unmarshalMap unmarshals the next JSON value, which must be an object, into
+// m, which must be a pointer to a map. Panics if m is not a *map.
+func unmarshalMap(dec *json.Decoder, m reflect.Value) error {
+	if m.Kind() != reflect.Ptr || m.Elem().Kind() != reflect.Map {
+		panic("unmarshalMap called on non-*map value")
+	}
+	walk := mapWalk{dec: dec, m: m, valueType: m.Elem().Type().Elem()}
+	return walk.run()
+}
+
+// mapWalk is the state machine that decodes a JSON object into a *map.
+type mapWalk struct {
+	dec       *json.Decoder // source of JSON tokens
+	key       string        // key of the entry currently being decoded
+	m         reflect.Value // the *map being filled
+	valueType reflect.Type  // the map's value type
+}
+
+// run runs our decoder state machine.
+func (m *mapWalk) run() error {
+	var state = m.start
+	var err error
+	for {
+		state, err = state()
+		if err != nil {
+			return err
+		}
+		if state == nil {
+			return nil
+		}
+	}
+}
+
+// start dispatches on the map's value type: custom unmarshalers and basic
+// types are handed to the stdlib decoder; struct/map/slice values are walked
+// entry by entry after consuming the opening '{'.
+func (m *mapWalk) start() (stateFn, error) {
+	// maps can have custom unmarshaler's.
+	if hasUnmarshalJSON(m.m) {
+		err := m.dec.Decode(m.m.Interface())
+		if err != nil {
+			return nil, err
+		}
+		return nil, nil
+	}
+
+	// We only want to use this if the map value is:
+	// *struct/struct/map/slice
+	// otherwise use standard decode
+	t, _ := m.valueBaseType()
+	switch t.Kind() {
+	case reflect.Struct, reflect.Map, reflect.Slice:
+		delim, err := m.dec.Token()
+		if err != nil {
+			return nil, err
+		}
+		// This indicates the value was set to JSON null.
+		if delim == nil {
+			return nil, nil
+		}
+		if !delimIs(delim, '{') {
+			return nil, fmt.Errorf("Unmarshal expected opening {, received %v", delim)
+		}
+		return m.next, nil
+	case reflect.Ptr:
+		return nil, fmt.Errorf("do not support maps with values of '**type' or '*reference")
+	}
+
+	// This is a basic map type, so just use Decode().
+	if err := m.dec.Decode(m.m.Interface()); err != nil {
+		return nil, err
+	}
+
+	return nil, nil
+}
+
+// next reads the next object key, or consumes the closing '}' and stops the
+// machine when the object is exhausted.
+func (m *mapWalk) next() (stateFn, error) {
+	if m.dec.More() {
+		key, err := m.dec.Token()
+		if err != nil {
+			return nil, err
+		}
+		// Inside an object the decoder always yields string key tokens, so
+		// this assertion is safe here.
+		m.key = key.(string)
+		return m.storeValue, nil
+	}
+	// No more entries, so remove final }.
+	_, err := m.dec.Token()
+	if err != nil {
+		return nil, err
+	}
+	return nil, nil
+}
+
+// storeValue routes the current entry to the store function matching the
+// map's (dereferenced) value kind.
+func (m *mapWalk) storeValue() (stateFn, error) {
+	v := m.valueType
+	for {
+		switch v.Kind() {
+		case reflect.Ptr:
+			v = v.Elem()
+			continue
+		case reflect.Struct:
+			return m.storeStruct, nil
+		case reflect.Map:
+			return m.storeMap, nil
+		case reflect.Slice:
+			return m.storeSlice, nil
+		}
+		return nil, fmt.Errorf("bug: mapWalk.storeValue() called on unsupported type: %v", v.Kind())
+	}
+}
+
+// storeStruct decodes a struct value for the current key and stores it,
+// as a pointer or value to match the map's declared value type.
+func (m *mapWalk) storeStruct() (stateFn, error) {
+	v := newValue(m.valueType)
+	if err := unmarshalStruct(m.dec, v.Interface()); err != nil {
+		return nil, err
+	}
+
+	if m.valueType.Kind() == reflect.Ptr {
+		m.m.Elem().SetMapIndex(reflect.ValueOf(m.key), v)
+		return m.next, nil
+	}
+	m.m.Elem().SetMapIndex(reflect.ValueOf(m.key), v.Elem())
+
+	return m.next, nil
+}
+
+// storeMap decodes a nested map value for the current key and stores it.
+func (m *mapWalk) storeMap() (stateFn, error) {
+	v := reflect.MakeMap(m.valueType)
+	ptr := newValue(v.Type())
+	ptr.Elem().Set(v)
+	if err := unmarshalMap(m.dec, ptr); err != nil {
+		return nil, err
+	}
+
+	m.m.Elem().SetMapIndex(reflect.ValueOf(m.key), v)
+
+	return m.next, nil
+}
+
+// storeSlice decodes a slice value for the current key and stores it.
+func (m *mapWalk) storeSlice() (stateFn, error) {
+	v := newValue(m.valueType)
+	if err := unmarshalSlice(m.dec, v); err != nil {
+		return nil, err
+	}
+
+	m.m.Elem().SetMapIndex(reflect.ValueOf(m.key), v.Elem())
+
+	return m.next, nil
+}
+
+// valueBaseType returns the underlying Type and whether the original was a
+// pointer. So a *struct would yield (struct, true), etc...
+func (m *mapWalk) valueBaseType() (reflect.Type, bool) {
+	ptr := false
+	v := m.valueType
+	if v.Kind() == reflect.Ptr {
+		ptr = true
+		v = v.Elem()
+	}
+	return v, ptr
+}
+
+// unmarshalSlice unmarshals the next JSON value, which must be an array, into
+// ptrSlice, which must be a pointer to a slice. newValue() can be used to
+// create the slice. Panics if ptrSlice is not a *[]T.
+func unmarshalSlice(dec *json.Decoder, ptrSlice reflect.Value) error {
+	if ptrSlice.Kind() != reflect.Ptr || ptrSlice.Elem().Kind() != reflect.Slice {
+		panic("unmarshalSlice called on non-*[]slice value")
+	}
+	walk := sliceWalk{
+		dec:       dec,
+		s:         ptrSlice,
+		valueType: ptrSlice.Elem().Type().Elem(),
+	}
+	return walk.run()
+}
+
+// sliceWalk is the state machine that decodes a JSON array into a *slice.
+type sliceWalk struct {
+	dec       *json.Decoder // source of JSON tokens
+	s         reflect.Value // *[]slice
+	valueType reflect.Type  // the slice's element type
+}
+
+// run runs our decoder state machine.
+func (s *sliceWalk) run() error {
+	var state = s.start
+	var err error
+	for {
+		state, err = state()
+		if err != nil {
+			return err
+		}
+		if state == nil {
+			return nil
+		}
+	}
+}
+
+// start dispatches on the element type: custom unmarshalers and basic types
+// go to the stdlib decoder; struct/map/slice elements are walked one by one
+// after consuming the opening '['.
+func (s *sliceWalk) start() (stateFn, error) {
+	// slices can have custom unmarshaler's.
+	if hasUnmarshalJSON(s.s) {
+		err := s.dec.Decode(s.s.Interface())
+		if err != nil {
+			return nil, err
+		}
+		return nil, nil
+	}
+
+	// We only want to use this if the slice value is:
+	// []*struct/[]struct/[]map/[]slice
+	// otherwise use standard decode
+	t := s.valueBaseType()
+
+	switch t.Kind() {
+	case reflect.Ptr:
+		return nil, fmt.Errorf("cannot unmarshal into a **<type> or *<reference>")
+	case reflect.Struct, reflect.Map, reflect.Slice:
+		delim, err := s.dec.Token()
+		if err != nil {
+			return nil, err
+		}
+		// This indicates the value was set to nil.
+		if delim == nil {
+			return nil, nil
+		}
+		if !delimIs(delim, '[') {
+			return nil, fmt.Errorf("Unmarshal expected opening [, received %v", delim)
+		}
+		return s.next, nil
+	}
+
+	if err := s.dec.Decode(s.s.Interface()); err != nil {
+		return nil, err
+	}
+	return nil, nil
+}
+
+// next advances to the next element, or consumes the closing ']' and stops
+// the machine when the array is exhausted.
+func (s *sliceWalk) next() (stateFn, error) {
+	if s.dec.More() {
+		return s.storeValue, nil
+	}
+	// Nothing left in the slice, remove closing ]
+	_, err := s.dec.Token()
+	return nil, err
+}
+
+// storeValue routes the current element to the store function matching the
+// slice's (dereferenced) element kind.
+func (s *sliceWalk) storeValue() (stateFn, error) {
+	t := s.valueBaseType()
+	switch t.Kind() {
+	case reflect.Ptr:
+		return nil, fmt.Errorf("do not support 'pointer to pointer' or 'pointer to reference' types")
+	case reflect.Struct:
+		return s.storeStruct, nil
+	case reflect.Map:
+		return s.storeMap, nil
+	case reflect.Slice:
+		return s.storeSlice, nil
+	}
+	return nil, fmt.Errorf("bug: sliceWalk.storeValue() called on unsupported type: %v", t.Kind())
+}
+
+// storeStruct decodes a struct element and appends it, as a pointer or value
+// to match the slice's declared element type.
+func (s *sliceWalk) storeStruct() (stateFn, error) {
+	v := newValue(s.valueType)
+	if err := unmarshalStruct(s.dec, v.Interface()); err != nil {
+		return nil, err
+	}
+
+	if s.valueType.Kind() == reflect.Ptr {
+		s.s.Elem().Set(reflect.Append(s.s.Elem(), v))
+		return s.next, nil
+	}
+
+	s.s.Elem().Set(reflect.Append(s.s.Elem(), v.Elem()))
+	return s.next, nil
+}
+
+// storeMap decodes a map element and appends it.
+func (s *sliceWalk) storeMap() (stateFn, error) {
+	v := reflect.MakeMap(s.valueType)
+	ptr := newValue(v.Type())
+	ptr.Elem().Set(v)
+
+	if err := unmarshalMap(s.dec, ptr); err != nil {
+		return nil, err
+	}
+
+	s.s.Elem().Set(reflect.Append(s.s.Elem(), v))
+
+	return s.next, nil
+}
+
+// storeSlice decodes a nested slice element and appends it.
+func (s *sliceWalk) storeSlice() (stateFn, error) {
+	v := newValue(s.valueType)
+	if err := unmarshalSlice(s.dec, v); err != nil {
+		return nil, err
+	}
+
+	s.s.Elem().Set(reflect.Append(s.s.Elem(), v.Elem()))
+
+	return s.next, nil
+}
+
+// valueBaseType returns the underlying Type. So a *struct would yield
+// struct, etc...
+func (s *sliceWalk) valueBaseType() reflect.Type {
+	v := s.valueType
+	if v.Kind() == reflect.Ptr {
+		v = v.Elem()
+	}
+	return v
+}
+
+// newValue() returns a new *type that represents type passed. A *T input
+// yields a new *T (not **T); any other T also yields a new *T.
+func newValue(valueType reflect.Type) reflect.Value {
+	if valueType.Kind() == reflect.Ptr {
+		return reflect.New(valueType.Elem())
+	}
+	return reflect.New(valueType)
+}
@@ -0,0 +1,346 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package json
+
+import (
+ "bytes"
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "unicode"
+)
+
+// marshalStruct takes in v, which must be a *struct or struct, and marshals its content
+// as JSON into buff (sometimes with writes to buff directly, sometimes via enc).
+// This call is recursive for all fields of *struct or struct type.
+func marshalStruct(v reflect.Value, buff *bytes.Buffer, enc *json.Encoder) error {
+	if v.Kind() == reflect.Ptr {
+		v = v.Elem()
+	}
+	// We only care about custom Marshalling a struct.
+	if v.Kind() != reflect.Struct {
+		return fmt.Errorf("bug: marshal() received a non *struct or struct, received type %T", v.Interface())
+	}
+
+	// A custom marshaler takes over the whole struct.
+	if hasMarshalJSON(v) {
+		b, err := callMarshalJSON(v)
+		if err != nil {
+			return err
+		}
+		buff.Write(b)
+		return nil
+	}
+
+	t := v.Type()
+
+	// If it has an AdditionalFields field make sure its the right type.
+	f := v.FieldByName(addField)
+	if f.Kind() != reflect.Invalid {
+		if f.Kind() != reflect.Map {
+			return fmt.Errorf("type %T has field 'AdditionalFields' that is not a map[string]interface{}", v.Interface())
+		}
+		if !f.Type().AssignableTo(mapStrInterType) {
+			return fmt.Errorf("type %T has field 'AdditionalFields' that is not a map[string]interface{}", v.Interface())
+		}
+	}
+
+	translator, err := findFields(v)
+	if err != nil {
+		return err
+	}
+
+	buff.WriteByte(leftBrace)
+	for x := 0; x < v.NumField(); x++ {
+		field := v.Field(x)
+
+		// We don't access private fields.
+		if unicode.IsLower(rune(t.Field(x).Name[0])) {
+			continue
+		}
+
+		// AdditionalFields entries are flattened into the object as if they
+		// were ordinary fields.
+		if t.Field(x).Name == addField {
+			if v.Field(x).Len() > 0 {
+				if err := writeAddFields(field.Interface(), buff, enc); err != nil {
+					return err
+				}
+				buff.WriteByte(comma)
+			}
+			continue
+		}
+
+		// If they have omitempty set, we don't write out the field if
+		// it is the zero value.
+		if hasOmitEmpty(t.Field(x).Tag.Get("json")) {
+			if v.Field(x).IsZero() {
+				continue
+			}
+		}
+
+		// Write out the field name part.
+		jsonName := translator.jsonName(t.Field(x).Name)
+		buff.WriteString(fmt.Sprintf("%q:", jsonName))
+
+		if field.Kind() == reflect.Ptr {
+			field = field.Elem()
+		}
+
+		if err := marshalStructField(field, buff, enc); err != nil {
+			return err
+		}
+	}
+
+	// NOTE(review): this assumes at least one field (or AdditionalFields
+	// entry) was written; for a struct that emits nothing it would remove the
+	// opening '{' instead of a comma — confirm that case cannot occur.
+	buff.Truncate(buff.Len() - 1) // Remove final comma
+	buff.WriteByte(rightBrace)
+
+	return nil
+}
+
+// marshalStructField writes a single (already-named) field value to buff,
+// recursing for struct/map/slice values and using enc for basic types.
+// A trailing comma is always appended via the defer.
+func marshalStructField(field reflect.Value, buff *bytes.Buffer, enc *json.Encoder) error {
+	// Determine if we need a trailing comma.
+	defer buff.WriteByte(comma)
+
+	switch field.Kind() {
+	// If it was a *struct or struct, we need to recursively all marshal().
+	case reflect.Struct:
+		// Take the address when possible so pointer-receiver marshalers are found.
+		if field.CanAddr() {
+			field = field.Addr()
+		}
+		return marshalStruct(field, buff, enc)
+	case reflect.Map:
+		return marshalMap(field, buff, enc)
+	case reflect.Slice:
+		return marshalSlice(field, buff, enc)
+	}
+
+	// It is just a basic type, so encode it.
+	if err := enc.Encode(field.Interface()); err != nil {
+		return err
+	}
+	buff.Truncate(buff.Len() - 1) // Remove Encode() added \n
+
+	return nil
+}
+
+// marshalMap writes a map value to buff as a JSON object. An empty map is
+// written as "{}" without running the state machine.
+func marshalMap(v reflect.Value, buff *bytes.Buffer, enc *json.Encoder) error {
+	if v.Kind() != reflect.Map {
+		return fmt.Errorf("bug: marshalMap() called on %T", v.Interface())
+	}
+	if v.Len() == 0 {
+		buff.WriteByte(leftBrace)
+		buff.WriteByte(rightBrace)
+		return nil
+	}
+	encoder := mapEncode{m: v, buff: buff, enc: enc}
+	return encoder.run()
+}
+
+// mapEncode is the state machine that encodes a map as a JSON object.
+type mapEncode struct {
+	m    reflect.Value // the map being encoded
+	buff *bytes.Buffer // output buffer
+	enc  *json.Encoder // encoder for basic values
+
+	// valueBaseType is the map's value type with one level of pointer removed.
+	valueBaseType reflect.Type
+}
+
+// run runs our encoder state machine.
+func (m *mapEncode) run() error {
+	var state = m.start
+	var err error
+	for {
+		state, err = state()
+		if err != nil {
+			return err
+		}
+		if state == nil {
+			return nil
+		}
+	}
+}
+
+// start handles custom marshalers and basic value types directly; maps whose
+// values are struct/map/slice proceed to encode().
+func (m *mapEncode) start() (stateFn, error) {
+	if hasMarshalJSON(m.m) {
+		b, err := callMarshalJSON(m.m)
+		if err != nil {
+			return nil, err
+		}
+		m.buff.Write(b)
+		return nil, nil
+	}
+
+	valueBaseType := m.m.Type().Elem()
+	if valueBaseType.Kind() == reflect.Ptr {
+		valueBaseType = valueBaseType.Elem()
+	}
+	m.valueBaseType = valueBaseType
+
+	switch valueBaseType.Kind() {
+	case reflect.Ptr:
+		return nil, fmt.Errorf("Marshal does not support **<type> or *<reference>")
+	case reflect.Struct, reflect.Map, reflect.Slice:
+		return m.encode, nil
+	}
+
+	// If the map value doesn't have a struct/map/slice, just Encode() it.
+	if err := m.enc.Encode(m.m.Interface()); err != nil {
+		return nil, err
+	}
+	m.buff.Truncate(m.buff.Len() - 1) // Remove Encode() added \n
+	return nil, nil
+}
+
+// encode writes out each key/value pair, recursing into the appropriate
+// marshal helper for the value kind. Keys are assumed to be strings.
+func (m *mapEncode) encode() (stateFn, error) {
+	m.buff.WriteByte(leftBrace)
+
+	iter := m.m.MapRange()
+	for iter.Next() {
+		// Write the key.
+		k := iter.Key()
+		m.buff.WriteString(fmt.Sprintf("%q:", k.String()))
+
+		v := iter.Value()
+		switch m.valueBaseType.Kind() {
+		case reflect.Struct:
+			if v.CanAddr() {
+				v = v.Addr()
+			}
+			if err := marshalStruct(v, m.buff, m.enc); err != nil {
+				return nil, err
+			}
+		case reflect.Map:
+			if err := marshalMap(v, m.buff, m.enc); err != nil {
+				return nil, err
+			}
+		case reflect.Slice:
+			if err := marshalSlice(v, m.buff, m.enc); err != nil {
+				return nil, err
+			}
+		default:
+			panic(fmt.Sprintf("critical bug: mapEncode.encode() called with value base type: %v", m.valueBaseType.Kind()))
+		}
+		m.buff.WriteByte(comma)
+	}
+	m.buff.Truncate(m.buff.Len() - 1) // Remove final comma
+	m.buff.WriteByte(rightBrace)
+
+	return nil, nil
+}
+
+// marshalSlice writes a slice value to buff as a JSON array. An empty slice
+// is written as "[]" without running the state machine.
+func marshalSlice(v reflect.Value, buff *bytes.Buffer, enc *json.Encoder) error {
+	if v.Kind() != reflect.Slice {
+		return fmt.Errorf("bug: marshalSlice() called on %T", v.Interface())
+	}
+	if v.Len() == 0 {
+		buff.WriteByte(leftParen)
+		buff.WriteByte(rightParen)
+		return nil
+	}
+	encoder := sliceEncode{s: v, buff: buff, enc: enc}
+	return encoder.run()
+}
+
+// sliceEncode is the state machine that encodes a slice as a JSON array.
+type sliceEncode struct {
+	s    reflect.Value // the slice being encoded
+	buff *bytes.Buffer // output buffer
+	enc  *json.Encoder // encoder for basic values
+
+	// valueBaseType is the element type with one level of pointer removed.
+	valueBaseType reflect.Type
+}
+
+// run runs our encoder state machine.
+func (s *sliceEncode) run() error {
+	var state = s.start
+	var err error
+	for {
+		state, err = state()
+		if err != nil {
+			return err
+		}
+		if state == nil {
+			return nil
+		}
+	}
+}
+
+// start handles custom marshalers and basic element types directly; slices of
+// struct/map/slice proceed to encode().
+func (s *sliceEncode) start() (stateFn, error) {
+	if hasMarshalJSON(s.s) {
+		b, err := callMarshalJSON(s.s)
+		if err != nil {
+			return nil, err
+		}
+		s.buff.Write(b)
+		return nil, nil
+	}
+
+	valueBaseType := s.s.Type().Elem()
+	if valueBaseType.Kind() == reflect.Ptr {
+		valueBaseType = valueBaseType.Elem()
+	}
+	s.valueBaseType = valueBaseType
+
+	switch valueBaseType.Kind() {
+	case reflect.Ptr:
+		return nil, fmt.Errorf("Marshal does not support **<type> or *<reference>")
+	case reflect.Struct, reflect.Map, reflect.Slice:
+		return s.encode, nil
+	}
+
+	// If the slice element doesn't have a struct/map/slice, just Encode() it.
+	if err := s.enc.Encode(s.s.Interface()); err != nil {
+		return nil, err
+	}
+	s.buff.Truncate(s.buff.Len() - 1) // Remove Encode added \n
+
+	return nil, nil
+}
+
+// encode writes out each element, recursing into the appropriate marshal
+// helper for the element kind.
+func (s *sliceEncode) encode() (stateFn, error) {
+	s.buff.WriteByte(leftParen)
+	for i := 0; i < s.s.Len(); i++ {
+		v := s.s.Index(i)
+		switch s.valueBaseType.Kind() {
+		case reflect.Struct:
+			if v.CanAddr() {
+				v = v.Addr()
+			}
+			if err := marshalStruct(v, s.buff, s.enc); err != nil {
+				return nil, err
+			}
+		case reflect.Map:
+			if err := marshalMap(v, s.buff, s.enc); err != nil {
+				return nil, err
+			}
+		case reflect.Slice:
+			if err := marshalSlice(v, s.buff, s.enc); err != nil {
+				return nil, err
+			}
+		default:
+			// Fixed: the panic previously blamed mapEncode.encode(), which
+			// would misdirect debugging of a slice-encoding failure.
+			panic(fmt.Sprintf("critical bug: sliceEncode.encode() called with value base type: %v", s.valueBaseType.Kind()))
+		}
+		s.buff.WriteByte(comma)
+	}
+	s.buff.Truncate(s.buff.Len() - 1) // Remove final comma
+	s.buff.WriteByte(rightParen)
+	return nil, nil
+}
+
+// writeAddFields writes the AdditionalFields struct field out to JSON as field
+// values. i must be a map[string]interface{} or this will panic.
+// Note: map iteration order is random, so the emitted field order is not
+// deterministic across calls.
+func writeAddFields(i interface{}, buff *bytes.Buffer, enc *json.Encoder) error {
+	m := i.(map[string]interface{})
+
+	x := 0
+	for k, v := range m {
+		buff.WriteString(fmt.Sprintf("%q:", k))
+		if err := enc.Encode(v); err != nil {
+			return err
+		}
+		buff.Truncate(buff.Len() - 1) // Remove Encode() added \n
+
+		// Comma between entries only; the caller appends the trailing comma.
+		if x+1 != len(m) {
+			buff.WriteByte(comma)
+		}
+		x++
+	}
+	return nil
+}
@@ -0,0 +1,290 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package json
+
+import (
+ "encoding/json"
+ "fmt"
+ "reflect"
+ "strings"
+)
+
+// unmarshalStruct decodes the next JSON object from jdec into i, which must
+// be a *struct that either implements json.Unmarshaler or has an
+// AdditionalFields map[string]interface{} field for unknown keys.
+func unmarshalStruct(jdec *json.Decoder, i interface{}) error {
+	v := reflect.ValueOf(i)
+	if v.Kind() != reflect.Ptr {
+		return fmt.Errorf("Unmarshal() received type %T, which is not a *struct", i)
+	}
+	v = v.Elem()
+	if v.Kind() != reflect.Struct {
+		return fmt.Errorf("Unmarshal() received type %T, which is not a *struct", i)
+	}
+
+	if hasUnmarshalJSON(v) {
+		// Indicates that this type has a custom Unmarshaler.
+		return jdec.Decode(v.Addr().Interface())
+	}
+
+	f := v.FieldByName(addField)
+	if f.Kind() == reflect.Invalid {
+		return fmt.Errorf("Unmarshal(%T) only supports structs that have the field AdditionalFields or implements json.Unmarshaler", i)
+	}
+
+	if f.Kind() != reflect.Map || !f.Type().AssignableTo(mapStrInterType) {
+		return fmt.Errorf("type %T has field 'AdditionalFields' that is not a map[string]interface{}", i)
+	}
+
+	dec := newDecoder(jdec, v)
+	return dec.run()
+}
+
+// decoder is the state machine that decodes a JSON object into a struct,
+// routing unknown keys into the AdditionalFields map.
+type decoder struct {
+	dec        *json.Decoder
+	value      reflect.Value // This will be a reflect.Struct
+	translator translateFields
+	key        string // JSON key currently being decoded
+}
+
+// newDecoder constructs a decoder over dec targeting the struct value.
+func newDecoder(dec *json.Decoder, value reflect.Value) *decoder {
+	return &decoder{value: value, dec: dec}
+}
+
+// run runs our decoder state machine.
+func (d *decoder) run() error {
+	var state = d.start
+	var err error
+	for {
+		state, err = state()
+		if err != nil {
+			return err
+		}
+		if state == nil {
+			return nil
+		}
+	}
+}
+
+// start looks for our opening delimiter '{' and then transitions to looping through our fields.
+func (d *decoder) start() (stateFn, error) {
+	var err error
+	d.translator, err = findFields(d.value)
+	if err != nil {
+		return nil, err
+	}
+
+	delim, err := d.dec.Token()
+	if err != nil {
+		return nil, err
+	}
+	if !delimIs(delim, '{') {
+		return nil, fmt.Errorf("Unmarshal expected opening {, received %v", delim)
+	}
+
+	return d.next, nil
+}
+
+// next gets the next struct field name from the raw json or stops the machine if we get our closing }.
+func (d *decoder) next() (stateFn, error) {
+	if !d.dec.More() {
+		// Remove the closing }.
+		if _, err := d.dec.Token(); err != nil {
+			return nil, err
+		}
+		return nil, nil
+	}
+
+	key, err := d.dec.Token()
+	if err != nil {
+		return nil, err
+	}
+
+	// Inside an object the decoder always yields string key tokens, so this
+	// assertion is safe here.
+	d.key = key.(string)
+	return d.storeValue, nil
+}
+
+// storeValue takes the next value and stores it in our struct. If the field can't be found
+// in the struct, it pushes the operation to storeAdditional().
+func (d *decoder) storeValue() (stateFn, error) {
+	goName := d.translator.goName(d.key)
+	if goName == "" {
+		goName = d.key
+	}
+
+	// We don't have the field in the struct, so it goes in AdditionalFields.
+	f := d.value.FieldByName(goName)
+	if f.Kind() == reflect.Invalid {
+		return d.storeAdditional, nil
+	}
+
+	// Indicates that this type has a custom Unmarshaler.
+	if hasUnmarshalJSON(f) {
+		err := d.dec.Decode(f.Addr().Interface())
+		if err != nil {
+			return nil, err
+		}
+		return d.next, nil
+	}
+
+	t, isPtr, err := fieldBaseType(d.value, goName)
+	if err != nil {
+		return nil, fmt.Errorf("type(%s) had field(%s) %w", d.value.Type().Name(), goName, err)
+	}
+
+	switch t.Kind() {
+	// We need to recursively call ourselves on any *struct or struct.
+	case reflect.Struct:
+		if isPtr {
+			// Allocate the pointee so there is something to decode into.
+			if f.IsNil() {
+				f.Set(reflect.New(t))
+			}
+		} else {
+			f = f.Addr()
+		}
+		if err := unmarshalStruct(d.dec, f.Interface()); err != nil {
+			return nil, err
+		}
+		return d.next, nil
+	case reflect.Map:
+		v := reflect.MakeMap(f.Type())
+		ptr := newValue(f.Type())
+		ptr.Elem().Set(v)
+		if err := unmarshalMap(d.dec, ptr); err != nil {
+			return nil, err
+		}
+		f.Set(ptr.Elem())
+		return d.next, nil
+	case reflect.Slice:
+		v := reflect.MakeSlice(f.Type(), 0, 0)
+		ptr := newValue(f.Type())
+		ptr.Elem().Set(v)
+		if err := unmarshalSlice(d.dec, ptr); err != nil {
+			return nil, err
+		}
+		f.Set(ptr.Elem())
+		return d.next, nil
+	}
+
+	if !isPtr {
+		f = f.Addr()
+	}
+
+	// For values that are pointers, we need them to be non-nil in order
+	// to decode into them.
+	if f.IsNil() {
+		f.Set(reflect.New(t))
+	}
+
+	if err := d.dec.Decode(f.Interface()); err != nil {
+		return nil, err
+	}
+
+	return d.next, nil
+}
+
+// storeAdditional pushes the key/value into our .AdditionalFields map.
+func (d *decoder) storeAdditional() (stateFn, error) {
+	// Capture the raw JSON so no information is lost for later re-marshalling.
+	rw := json.RawMessage{}
+	if err := d.dec.Decode(&rw); err != nil {
+		return nil, err
+	}
+	field := d.value.FieldByName(addField)
+	if field.IsNil() {
+		field.Set(reflect.MakeMap(field.Type()))
+	}
+	field.SetMapIndex(reflect.ValueOf(d.key), reflect.ValueOf(rw))
+	return d.next, nil
+}
+
+// fieldBaseType returns the type of the named struct field with one level of
+// pointer removed, plus whether the field was a pointer. Pointer-to-pointer
+// fields are rejected.
+func fieldBaseType(v reflect.Value, fieldName string) (t reflect.Type, isPtr bool, err error) {
+	sf, ok := v.Type().FieldByName(fieldName)
+	if !ok {
+		return nil, false, fmt.Errorf("bug: fieldBaseType() lookup of field(%s) on type(%s): do not have field", fieldName, v.Type().Name())
+	}
+	t = sf.Type
+	if t.Kind() == reflect.Ptr {
+		t = t.Elem()
+		isPtr = true
+	}
+	if t.Kind() == reflect.Ptr {
+		return nil, isPtr, fmt.Errorf("received pointer to pointer type, not supported")
+	}
+	return t, isPtr, nil
+}
+
+// translateField pairs a field's JSON name with its Go name.
+type translateField struct {
+	jsonName string
+	goName   string
+}
+
+// translateFields is a list of translateFields with a handy lookup method.
+type translateFields []translateField
+
+// goName loops through a list of fields looking for one containing the jsonName and
+// returning the goName. If not found, returns the empty string.
+// Note: not a map because at this size slices are faster even in tight loops.
+func (t translateFields) goName(jsonName string) string {
+	for _, entry := range t {
+		if entry.jsonName == jsonName {
+			return entry.goName
+		}
+	}
+	return ""
+}
+
+// jsonName loops through a list of fields looking for one containing the goName and
+// returning the jsonName. If not found, returns the empty string.
+// Note: not a map because at this size slices are faster even in tight loops.
+func (t translateFields) jsonName(goName string) string {
+	for _, entry := range t {
+		if entry.goName == goName {
+			return entry.jsonName
+		}
+	}
+	return ""
+}
+
+// umarshalerType is the reflect.Type of json.Unmarshaler.
+// NOTE(review): name is missing an 'n' ("unmarshalerType"); renaming would
+// touch other code, so it is documented rather than changed here.
+var umarshalerType = reflect.TypeOf((*json.Unmarshaler)(nil)).Elem()
+
+// findFields parses a struct and writes the field tags for lookup.
+// NOTE(review): the doc below said it errors on struct fields that do NOT
+// implement json.Marshaler, but the code errors when the field's value type
+// DOES implement json.Unmarshaler — the condition and the error text
+// contradict each other; confirm intended behavior before relying on it.
+func findFields(v reflect.Value) (translateFields, error) {
+	if v.Kind() == reflect.Ptr {
+		v = v.Elem()
+	}
+	if v.Kind() != reflect.Struct {
+		return nil, fmt.Errorf("findFields received a %s type, expected *struct or struct", v.Type().Name())
+	}
+	tfs := make([]translateField, 0, v.NumField())
+	for i := 0; i < v.NumField(); i++ {
+		tf := translateField{
+			goName:   v.Type().Field(i).Name,
+			jsonName: parseTag(v.Type().Field(i).Tag.Get("json")),
+		}
+		// Fields with no tag or a "-" tag fall back to the Go name.
+		switch tf.jsonName {
+		case "", "-":
+			tf.jsonName = tf.goName
+		}
+		tfs = append(tfs, tf)
+
+		f := v.Field(i)
+		if f.Kind() == reflect.Ptr {
+			f = f.Elem()
+		}
+		if f.Kind() == reflect.Struct {
+			if f.Type().Implements(umarshalerType) {
+				return nil, fmt.Errorf("struct type %q which has field %q which "+
+					"doesn't implement json.Unmarshaler", v.Type().Name(), v.Type().Field(i).Name)
+			}
+		}
+	}
+	return tfs, nil
+}
+
+// parseTag returns the name portion (everything before the first comma) of a
+// struct tag value as returned by reflect.StructField.Tag().Get().
+func parseTag(tag string) string {
+	if idx := strings.IndexByte(tag, ','); idx >= 0 {
+		return tag[:idx]
+	}
+	return tag
+}
@@ -0,0 +1,70 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+// Package time provides for custom types to translate time from JSON and other formats
+// into time.Time objects.
+package time
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+ "time"
+)
+
// Unix provides a type that can marshal and unmarshal a string representation
// of the unix epoch into a time.Time object.
type Unix struct {
	T time.Time
}

// MarshalJSON implements encoding/json.MarshalJSON(). The zero time marshals
// to no bytes at all (matching this package's historical behavior); otherwise
// the epoch seconds are emitted as a quoted decimal string.
func (u Unix) MarshalJSON() ([]byte, error) {
	if u.T.IsZero() {
		return []byte(""), nil
	}
	epoch := strconv.FormatInt(u.T.Unix(), 10)
	return []byte(fmt.Sprintf("%q", epoch)), nil
}

// UnmarshalJSON implements encoding/json.UnmarshalJSON(). It accepts the
// epoch seconds either bare or wrapped in double quotes.
func (u *Unix) UnmarshalJSON(b []byte) error {
	secs, err := strconv.Atoi(strings.Trim(string(b), `"`))
	if err != nil {
		return fmt.Errorf("unix time(%s) could not be converted from string to int: %w", string(b), err)
	}
	u.T = time.Unix(int64(secs), 0)
	return nil
}
+
+// DurationTime provides a type that can marshal and unmarshal a string representation
+// of a duration from now into a time.Time object.
+// Note: I'm not sure this is the best way to do this. What happens is we get a field
+// called "expires_in" that represents the seconds from now that this expires. We
+// turn that into a time we call .ExpiresOn. But maybe we should be recording
+// when the token was received at .TokenRecieved and .ExpiresIn should remain as a duration.
+// Then we could have a method called ExpiresOn(). Honestly, the whole thing is
+// bad because the server doesn't return a concrete time. I think this is
+// cleaner, but its not great either.
+type DurationTime struct {
+ T time.Time
+}
+
+// MarshalJSON implements encoding/json.MarshalJSON().
+func (d DurationTime) MarshalJSON() ([]byte, error) {
+ if d.T.IsZero() {
+ return []byte(""), nil
+ }
+
+ dt := time.Until(d.T)
+ return []byte(fmt.Sprintf("%d", int64(dt*time.Second))), nil
+}
+
+// UnmarshalJSON implements encoding/json.UnmarshalJSON().
+func (d *DurationTime) UnmarshalJSON(b []byte) error {
+ i, err := strconv.Atoi(strings.Trim(string(b), `"`))
+ if err != nil {
+ return fmt.Errorf("unix time(%s) could not be converted from string to int: %w", string(b), err)
+ }
+ d.T = time.Now().Add(time.Duration(i) * time.Second)
+ return nil
+}
@@ -0,0 +1,177 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+// Package local contains a local HTTP server used with interactive authentication.
+package local
+
+import (
+ "context"
+ "fmt"
+ "net"
+ "net/http"
+ "strconv"
+ "strings"
+ "time"
+)
+
// okPage is the HTML page rendered in the user's browser after a successful
// interactive login.
var okPage = []byte(`
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8" />
    <title>Authentication Complete</title>
</head>
<body>
    <p>Authentication complete. You can return to the application. Feel free to close this browser tab.</p>
</body>
</html>
`)
+
// failPage is the HTML format string rendered after a failed login; its two
// %s verbs receive the error code and the error description (see handler).
const failPage = `
<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8" />
    <title>Authentication Failed</title>
</head>
<body>
    <p>Authentication failed. You can return to the application. Feel free to close this browser tab.</p>
    <p>Error details: error %s error_description: %s</p>
</body>
</html>
`
+
// Result is the result from the redirect.
type Result struct {
	// Code is the authorization code sent by the authority server on success.
	Code string
	// Err is set if there was an error.
	Err error
}
+
// Server is an HTTP server.
type Server struct {
	// Addr is the address the server is listening on.
	Addr string
	// resultCh receives the single Result produced by the redirect handler
	// (buffered with capacity 1; extra results are dropped).
	resultCh chan Result
	// s is the underlying HTTP server.
	s *http.Server
	// reqState is the OAuth state value we sent; the redirect must echo it back.
	reqState string
}
+
+// New creates a local HTTP server and starts it.
+func New(reqState string, port int) (*Server, error) {
+ var l net.Listener
+ var err error
+ var portStr string
+ if port > 0 {
+ // use port provided by caller
+ l, err = net.Listen("tcp", fmt.Sprintf("localhost:%d", port))
+ portStr = strconv.FormatInt(int64(port), 10)
+ } else {
+ // find a free port
+ for i := 0; i < 10; i++ {
+ l, err = net.Listen("tcp", "localhost:0")
+ if err != nil {
+ continue
+ }
+ addr := l.Addr().String()
+ portStr = addr[strings.LastIndex(addr, ":")+1:]
+ break
+ }
+ }
+ if err != nil {
+ return nil, err
+ }
+
+ serv := &Server{
+ Addr: fmt.Sprintf("http://localhost:%s", portStr),
+ s: &http.Server{Addr: "localhost:0", ReadHeaderTimeout: time.Second},
+ reqState: reqState,
+ resultCh: make(chan Result, 1),
+ }
+ serv.s.Handler = http.HandlerFunc(serv.handler)
+
+ if err := serv.start(l); err != nil {
+ return nil, err
+ }
+
+ return serv, nil
+}
+
+func (s *Server) start(l net.Listener) error {
+ go func() {
+ err := s.s.Serve(l)
+ if err != nil {
+ select {
+ case s.resultCh <- Result{Err: err}:
+ default:
+ }
+ }
+ }()
+
+ return nil
+}
+
+// Result gets the result of the redirect operation. Once a single result is returned, the server
+// is shutdown. ctx deadline will be honored.
+func (s *Server) Result(ctx context.Context) Result {
+ select {
+ case <-ctx.Done():
+ return Result{Err: ctx.Err()}
+ case r := <-s.resultCh:
+ return r
+ }
+}
+
+// Shutdown shuts down the server.
+func (s *Server) Shutdown() {
+ // Note: You might get clever and think you can do this in handler() as a defer, you can't.
+ _ = s.s.Shutdown(context.Background())
+}
+
+func (s *Server) putResult(r Result) {
+ select {
+ case s.resultCh <- r:
+ default:
+ }
+}
+
+func (s *Server) handler(w http.ResponseWriter, r *http.Request) {
+ q := r.URL.Query()
+
+ headerErr := q.Get("error")
+ if headerErr != "" {
+ desc := q.Get("error_description")
+ // Note: It is a little weird we handle some errors by not going to the failPage. If they all should,
+ // change this to s.error() and make s.error() write the failPage instead of an error code.
+ _, _ = w.Write([]byte(fmt.Sprintf(failPage, headerErr, desc)))
+ s.putResult(Result{Err: fmt.Errorf(desc)})
+ return
+ }
+
+ respState := q.Get("state")
+ switch respState {
+ case s.reqState:
+ case "":
+ s.error(w, http.StatusInternalServerError, "server didn't send OAuth state")
+ return
+ default:
+ s.error(w, http.StatusInternalServerError, "mismatched OAuth state, req(%s), resp(%s)", s.reqState, respState)
+ return
+ }
+
+ code := q.Get("code")
+ if code == "" {
+ s.error(w, http.StatusInternalServerError, "authorization code missing in query string")
+ return
+ }
+
+ _, _ = w.Write(okPage)
+ s.putResult(Result{Code: code})
+}
+
+func (s *Server) error(w http.ResponseWriter, code int, str string, i ...interface{}) {
+ err := fmt.Errorf(str, i...)
+ http.Error(w, err.Error(), code)
+ s.putResult(Result{Err: err})
+}
@@ -0,0 +1,354 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package oauth
+
+import (
+ "context"
+ "encoding/json"
+ "fmt"
+ "io"
+ "time"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/errors"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/exported"
+ internalTime "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/types/time"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust/defs"
+ "github.com/google/uuid"
+)
+
// ResolveEndpointer contains the methods for resolving authority endpoints.
type ResolveEndpointer interface {
	// ResolveEndpoints maps authorityInfo (plus, for ADFS, the user's UPN) to
	// the concrete authorization/token endpoints.
	ResolveEndpoints(ctx context.Context, authorityInfo authority.Info, userPrincipalName string) (authority.Endpoints, error)
}
+
// AccessTokens contains the methods for fetching tokens from different sources.
// Each method corresponds to one OAuth grant/flow against the token endpoint.
type AccessTokens interface {
	DeviceCodeResult(ctx context.Context, authParameters authority.AuthParams) (accesstokens.DeviceCodeResult, error)
	FromUsernamePassword(ctx context.Context, authParameters authority.AuthParams) (accesstokens.TokenResponse, error)
	FromAuthCode(ctx context.Context, req accesstokens.AuthCodeRequest) (accesstokens.TokenResponse, error)
	FromRefreshToken(ctx context.Context, appType accesstokens.AppType, authParams authority.AuthParams, cc *accesstokens.Credential, refreshToken string) (accesstokens.TokenResponse, error)
	FromClientSecret(ctx context.Context, authParameters authority.AuthParams, clientSecret string) (accesstokens.TokenResponse, error)
	FromAssertion(ctx context.Context, authParameters authority.AuthParams, assertion string) (accesstokens.TokenResponse, error)
	FromUserAssertionClientSecret(ctx context.Context, authParameters authority.AuthParams, userAssertion string, clientSecret string) (accesstokens.TokenResponse, error)
	FromUserAssertionClientCertificate(ctx context.Context, authParameters authority.AuthParams, userAssertion string, assertion string) (accesstokens.TokenResponse, error)
	FromDeviceCodeResult(ctx context.Context, authParameters authority.AuthParams, deviceCodeResult accesstokens.DeviceCodeResult) (accesstokens.TokenResponse, error)
	FromSamlGrant(ctx context.Context, authParameters authority.AuthParams, samlGrant wstrust.SamlTokenInfo) (accesstokens.TokenResponse, error)
}
+
// FetchAuthority will be implemented by authority.Authority.
// It answers user-realm and AAD instance-discovery queries.
type FetchAuthority interface {
	UserRealm(context.Context, authority.AuthParams) (authority.UserRealm, error)
	AADInstanceDiscovery(context.Context, authority.Info) (authority.InstanceDiscoveryResponse, error)
}
+
// FetchWSTrust contains the methods for interacting with WSTrust endpoints.
// These are used for federated username/password authentication.
type FetchWSTrust interface {
	Mex(ctx context.Context, federationMetadataURL string) (defs.MexDocument, error)
	SAMLTokenInfo(ctx context.Context, authParameters authority.AuthParams, cloudAudienceURN string, endpoint defs.Endpoint) (wstrust.SamlTokenInfo, error)
}
+
// Client provides tokens for various types of token requests.
type Client struct {
	// Resolver resolves the authority's authorization/token endpoints.
	Resolver ResolveEndpointer
	// AccessTokens performs the token-endpoint calls for each flow.
	AccessTokens AccessTokens
	// Authority answers user-realm and instance-discovery queries.
	Authority FetchAuthority
	// WSTrust performs WS-Trust calls for federated accounts.
	WSTrust FetchWSTrust
}
+
+// New is the constructor for Token.
+func New(httpClient ops.HTTPClient) *Client {
+ r := ops.New(httpClient)
+ return &Client{
+ Resolver: newAuthorityEndpoint(r),
+ AccessTokens: r.AccessTokens(),
+ Authority: r.Authority(),
+ WSTrust: r.WSTrust(),
+ }
+}
+
// ResolveEndpoints gets the authorization and token endpoints and creates an AuthorityEndpoints instance.
// It is a thin delegation to the injected Resolver.
func (t *Client) ResolveEndpoints(ctx context.Context, authorityInfo authority.Info, userPrincipalName string) (authority.Endpoints, error) {
	return t.Resolver.ResolveEndpoints(ctx, authorityInfo, userPrincipalName)
}
+
// AADInstanceDiscovery attempts to discover a tenant endpoint (used in OIDC auth with an authorization endpoint).
// This is done by AAD which allows for aliasing of tenants (windows.sts.net is the same as login.windows.com).
// It is a thin delegation to the injected Authority.
func (t *Client) AADInstanceDiscovery(ctx context.Context, authorityInfo authority.Info) (authority.InstanceDiscoveryResponse, error) {
	return t.Authority.AADInstanceDiscovery(ctx, authorityInfo)
}
+
+// AuthCode returns a token based on an authorization code.
+func (t *Client) AuthCode(ctx context.Context, req accesstokens.AuthCodeRequest) (accesstokens.TokenResponse, error) {
+ if err := scopeError(req.AuthParams); err != nil {
+ return accesstokens.TokenResponse{}, err
+ }
+ if err := t.resolveEndpoint(ctx, &req.AuthParams, ""); err != nil {
+ return accesstokens.TokenResponse{}, err
+ }
+
+ tResp, err := t.AccessTokens.FromAuthCode(ctx, req)
+ if err != nil {
+ return accesstokens.TokenResponse{}, fmt.Errorf("could not retrieve token from auth code: %w", err)
+ }
+ return tResp, nil
+}
+
// Credential acquires a token from the authority using a client credentials grant.
func (t *Client) Credential(ctx context.Context, authParams authority.AuthParams, cred *accesstokens.Credential) (accesstokens.TokenResponse, error) {
	// If the application supplied a custom TokenProvider, bypass the
	// authority entirely and synthesize the TokenResponse from its result.
	if cred.TokenProvider != nil {
		now := time.Now()
		// Defensive copy so the provider cannot mutate the caller's slice.
		scopes := make([]string, len(authParams.Scopes))
		copy(scopes, authParams.Scopes)
		params := exported.TokenProviderParameters{
			Claims:        authParams.Claims,
			CorrelationID: uuid.New().String(),
			Scopes:        scopes,
			TenantID:      authParams.AuthorityInfo.Tenant,
		}
		tr, err := cred.TokenProvider(ctx, params)
		if err != nil {
			// Empty scopes are a common cause of provider failures, so add a
			// hint to the error in that case.
			if len(scopes) == 0 {
				err = fmt.Errorf("token request had an empty authority.AuthParams.Scopes, which may cause the following error: %w", err)
				return accesstokens.TokenResponse{}, err
			}
			return accesstokens.TokenResponse{}, err
		}
		return accesstokens.TokenResponse{
			TokenType:   authParams.AuthnScheme.AccessTokenType(),
			AccessToken: tr.AccessToken,
			// ExpiresInSeconds is relative; anchor it to the time captured
			// before calling the provider.
			ExpiresOn: internalTime.DurationTime{
				T: now.Add(time.Duration(tr.ExpiresInSeconds) * time.Second),
			},
			GrantedScopes: accesstokens.Scopes{Slice: authParams.Scopes},
		}, nil
	}

	if err := t.resolveEndpoint(ctx, &authParams, ""); err != nil {
		return accesstokens.TokenResponse{}, err
	}

	// Client secret wins when present; otherwise sign a JWT assertion with
	// the credential's certificate/key.
	if cred.Secret != "" {
		return t.AccessTokens.FromClientSecret(ctx, authParams, cred.Secret)
	}
	jwt, err := cred.JWT(ctx, authParams)
	if err != nil {
		return accesstokens.TokenResponse{}, err
	}
	return t.AccessTokens.FromAssertion(ctx, authParams, jwt)
}
+
// OnBehalfOf acquires a token using the on-behalf-of flow: the caller's user
// assertion (authParams.UserAssertion) is exchanged for a downstream token,
// with the client authenticating via its secret or a certificate assertion.
func (t *Client) OnBehalfOf(ctx context.Context, authParams authority.AuthParams, cred *accesstokens.Credential) (accesstokens.TokenResponse, error) {
	if err := scopeError(authParams); err != nil {
		return accesstokens.TokenResponse{}, err
	}
	if err := t.resolveEndpoint(ctx, &authParams, ""); err != nil {
		return accesstokens.TokenResponse{}, err
	}

	// Client secret wins when present; otherwise sign a JWT assertion.
	if cred.Secret != "" {
		return t.AccessTokens.FromUserAssertionClientSecret(ctx, authParams, authParams.UserAssertion, cred.Secret)
	}
	jwt, err := cred.JWT(ctx, authParams)
	if err != nil {
		return accesstokens.TokenResponse{}, err
	}
	tr, err := t.AccessTokens.FromUserAssertionClientCertificate(ctx, authParams, authParams.UserAssertion, jwt)
	if err != nil {
		return accesstokens.TokenResponse{}, err
	}
	return tr, nil
}
+
+func (t *Client) Refresh(ctx context.Context, reqType accesstokens.AppType, authParams authority.AuthParams, cc *accesstokens.Credential, refreshToken accesstokens.RefreshToken) (accesstokens.TokenResponse, error) {
+ if err := scopeError(authParams); err != nil {
+ return accesstokens.TokenResponse{}, err
+ }
+ if err := t.resolveEndpoint(ctx, &authParams, ""); err != nil {
+ return accesstokens.TokenResponse{}, err
+ }
+
+ tr, err := t.AccessTokens.FromRefreshToken(ctx, reqType, authParams, cc, refreshToken.Secret)
+ if err != nil {
+ return accesstokens.TokenResponse{}, err
+ }
+ return tr, nil
+}
+
// UsernamePassword retrieves a token where a username and password is used. However, if this is
// a user realm of "Federated", this uses SAML tokens. If "Managed", uses normal username/password.
func (t *Client) UsernamePassword(ctx context.Context, authParams authority.AuthParams) (accesstokens.TokenResponse, error) {
	if err := scopeError(authParams); err != nil {
		return accesstokens.TokenResponse{}, err
	}

	// ADFS authorities resolve endpoints by UPN and skip the realm lookup.
	if authParams.AuthorityInfo.AuthorityType == authority.ADFS {
		if err := t.resolveEndpoint(ctx, &authParams, authParams.Username); err != nil {
			return accesstokens.TokenResponse{}, err
		}
		return t.AccessTokens.FromUsernamePassword(ctx, authParams)
	}
	if err := t.resolveEndpoint(ctx, &authParams, ""); err != nil {
		return accesstokens.TokenResponse{}, err
	}

	userRealm, err := t.Authority.UserRealm(ctx, authParams)
	if err != nil {
		return accesstokens.TokenResponse{}, fmt.Errorf("problem getting user realm from authority: %w", err)
	}

	switch userRealm.AccountType {
	case authority.Federated:
		// Federated accounts go through WS-Trust: fetch the MEX document,
		// obtain a SAML token, then exchange it for an OAuth token.
		mexDoc, err := t.WSTrust.Mex(ctx, userRealm.FederationMetadataURL)
		if err != nil {
			err = fmt.Errorf("problem getting mex doc from federated url(%s): %w", userRealm.FederationMetadataURL, err)
			return accesstokens.TokenResponse{}, err
		}

		saml, err := t.WSTrust.SAMLTokenInfo(ctx, authParams, userRealm.CloudAudienceURN, mexDoc.UsernamePasswordEndpoint)
		if err != nil {
			err = fmt.Errorf("problem getting SAML token info: %w", err)
			return accesstokens.TokenResponse{}, err
		}
		tr, err := t.AccessTokens.FromSamlGrant(ctx, authParams, saml)
		if err != nil {
			return accesstokens.TokenResponse{}, err
		}
		return tr, nil
	case authority.Managed:
		// NOTE(review): scopeError above already rejected empty Scopes, so
		// this branch appears unreachable; worse, err is nil here, so the %w
		// wrap below would render as %!w(<nil>). Consider removing or
		// rewriting this check.
		if len(authParams.Scopes) == 0 {
			err = fmt.Errorf("token request had an empty authority.AuthParams.Scopes, which may cause the following error: %w", err)
			return accesstokens.TokenResponse{}, err
		}
		return t.AccessTokens.FromUsernamePassword(ctx, authParams)
	}
	return accesstokens.TokenResponse{}, errors.New("unknown account type")
}
+
// DeviceCode is the result of a call to Token.DeviceCode().
type DeviceCode struct {
	// Result is the device code result from the first call in the device code flow. This allows
	// the caller to retrieve the displayed code that is used to authorize on the second device.
	Result accesstokens.DeviceCodeResult
	// authParams are the parameters the flow was started with; reused when polling.
	authParams authority.AuthParams

	// accessTokens performs the polling calls; nil means this value was not
	// created by Client.DeviceCode and is invalid (see Token).
	accessTokens AccessTokens
}
+
// Token returns a token AFTER the user uses the user code on the second device. This will block
// until either: (1) the code is input by the user and the service releases a token, (2) the token
// expires, (3) the Context passed to .DeviceCode() is cancelled or expires, (4) some other service
// error occurs.
func (d DeviceCode) Token(ctx context.Context) (accesstokens.TokenResponse, error) {
	if d.accessTokens == nil {
		return accesstokens.TokenResponse{}, fmt.Errorf("DeviceCode was either created outside its package or the creating method had an error. DeviceCode is not valid")
	}

	// Bound the wait by the device code's own expiry, unless the caller's
	// deadline is already sooner.
	var cancel context.CancelFunc
	if deadline, ok := ctx.Deadline(); !ok || d.Result.ExpiresOn.Before(deadline) {
		ctx, cancel = context.WithDeadline(ctx, d.Result.ExpiresOn)
	} else {
		ctx, cancel = context.WithCancel(ctx)
	}
	defer cancel()

	// Poll with a growing interval; the timer is reused via Reset each loop.
	var interval = 50 * time.Millisecond
	timer := time.NewTimer(interval)
	defer timer.Stop()

	for {
		timer.Reset(interval)
		select {
		case <-ctx.Done():
			return accesstokens.TokenResponse{}, ctx.Err()
		case <-timer.C:
			// NOTE(review): this triples the interval each tick (i + i*2),
			// capped at 5s; if plain doubling was intended it should be
			// interval *= 2 — confirm before changing.
			interval += interval * 2
			if interval > 5*time.Second {
				interval = 5 * time.Second
			}
		}

		// Keep polling while the service says the user hasn't finished yet.
		token, err := d.accessTokens.FromDeviceCodeResult(ctx, d.authParams, d.Result)
		if err != nil && isWaitDeviceCodeErr(err) {
			continue
		}
		return token, err // This handles if it was a non-wait error or success
	}
}
+
// deviceCodeError is the minimal JSON error body returned by the token
// endpoint while the device code flow has not yet completed.
type deviceCodeError struct {
	Error string `json:"error"`
}
+
+func isWaitDeviceCodeErr(err error) bool {
+ var c errors.CallErr
+ if !errors.As(err, &c) {
+ return false
+ }
+ if c.Resp.StatusCode != 400 {
+ return false
+ }
+ var dCErr deviceCodeError
+ defer c.Resp.Body.Close()
+ body, err := io.ReadAll(c.Resp.Body)
+ if err != nil {
+ return false
+ }
+ err = json.Unmarshal(body, &dCErr)
+ if err != nil {
+ return false
+ }
+ if dCErr.Error == "authorization_pending" || dCErr.Error == "slow_down" {
+ return true
+ }
+ return false
+}
+
+// DeviceCode returns a DeviceCode object that can be used to get the code that must be entered on the second
+// device and optionally the token once the code has been entered on the second device.
+func (t *Client) DeviceCode(ctx context.Context, authParams authority.AuthParams) (DeviceCode, error) {
+ if err := scopeError(authParams); err != nil {
+ return DeviceCode{}, err
+ }
+
+ if err := t.resolveEndpoint(ctx, &authParams, ""); err != nil {
+ return DeviceCode{}, err
+ }
+
+ dcr, err := t.AccessTokens.DeviceCodeResult(ctx, authParams)
+ if err != nil {
+ return DeviceCode{}, err
+ }
+
+ return DeviceCode{Result: dcr, authParams: authParams, accessTokens: t.AccessTokens}, nil
+}
+
+func (t *Client) resolveEndpoint(ctx context.Context, authParams *authority.AuthParams, userPrincipalName string) error {
+ endpoints, err := t.Resolver.ResolveEndpoints(ctx, authParams.AuthorityInfo, userPrincipalName)
+ if err != nil {
+ return fmt.Errorf("unable to resolve an endpoint: %s", err)
+ }
+ authParams.Endpoints = endpoints
+ return nil
+}
+
+// scopeError takes an authority.AuthParams and returns an error
+// if len(AuthParams.Scope) == 0.
+func scopeError(a authority.AuthParams) error {
+ // TODO(someone): we could look deeper at the message to determine if
+ // it's a scope error, but this is a good start.
+ /*
+ {error":"invalid_scope","error_description":"AADSTS1002012: The provided value for scope
+ openid offline_access profile is not valid. Client credential flows must have a scope value
+ with /.default suffixed to the resource identifier (application ID URI)...}
+ */
+ if len(a.Scopes) == 0 {
+ return fmt.Errorf("token request had an empty authority.AuthParams.Scopes, which is invalid")
+ }
+ return nil
+}
@@ -0,0 +1,457 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+/*
+Package accesstokens exposes a REST client for querying backend systems to get various types of
+access tokens (oauth) for use in authentication.
+
+These calls are of type "application/x-www-form-urlencoded". This means we use url.Values to
+represent arguments and then encode them into the POST body message. We receive JSON in
+return for the requests. The request definition is defined in https://tools.ietf.org/html/rfc7521#section-4.2 .
+*/
+package accesstokens
+
+import (
+ "context"
+ "crypto"
+
+ /* #nosec */
+ "crypto/sha1"
+ "crypto/x509"
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/exported"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/internal/grant"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust"
+ "github.com/golang-jwt/jwt/v5"
+ "github.com/google/uuid"
+)
+
// Form-field names (and one fixed value) used in the x-www-form-urlencoded
// bodies POSTed to the token endpoint.
const (
	grantType     = "grant_type"
	deviceCode    = "device_code"
	clientID      = "client_id"
	clientInfo    = "client_info"
	clientInfoVal = "1"
	username      = "username"
	password      = "password"
)
+
//go:generate stringer -type=AppType

// AppType is whether the authorization code flow is for a public or confidential client.
type AppType int8

const (
	// ATUnknown is the zero value when the type hasn't been set.
	ATUnknown AppType = iota
	// ATPublic indicates this is for the Public.Client.
	ATPublic
	// ATConfidential indicates this is for the Confidential.Client.
	ATConfidential
)
+
// urlFormCaller is the transport seam: it POSTs qv to endpoint as
// application/x-www-form-urlencoded and decodes the JSON reply into resp.
type urlFormCaller interface {
	URLFormCall(ctx context.Context, endpoint string, qv url.Values, resp interface{}) error
}
+
// DeviceCodeResponse represents the HTTP response received from the device code endpoint
type DeviceCodeResponse struct {
	authority.OAuthResponseBase

	// UserCode is the code the user enters on the second device.
	UserCode string `json:"user_code"`
	// DeviceCode is the opaque code this client later redeems for a token.
	DeviceCode string `json:"device_code"`
	// VerificationURL is where the user goes to enter UserCode.
	VerificationURL string `json:"verification_url"`
	// ExpiresIn is the codes' lifetime in seconds from now (see Convert).
	ExpiresIn int `json:"expires_in"`
	// Interval is the suggested polling interval, in seconds.
	Interval int `json:"interval"`
	// Message is the human-readable instruction text from the authority.
	Message string `json:"message"`

	// AdditionalFields captures any JSON keys not mapped above.
	AdditionalFields map[string]interface{}
}
+
+// Convert converts the DeviceCodeResponse to a DeviceCodeResult
+func (dcr DeviceCodeResponse) Convert(clientID string, scopes []string) DeviceCodeResult {
+ expiresOn := time.Now().UTC().Add(time.Duration(dcr.ExpiresIn) * time.Second)
+ return NewDeviceCodeResult(dcr.UserCode, dcr.DeviceCode, dcr.VerificationURL, expiresOn, dcr.Interval, dcr.Message, clientID, scopes)
+}
+
// Credential represents the credential used in confidential client flows. This can be either
// a Secret or Cert/Key. Exactly one authentication mechanism is expected to
// be populated.
type Credential struct {
	// Secret contains the credential secret if we are doing auth by secret.
	Secret string

	// Cert is the public certificate, if we're authenticating by certificate.
	Cert *x509.Certificate
	// Key is the private key for signing, if we're authenticating by certificate.
	Key crypto.PrivateKey
	// X5c is the JWT assertion's x5c header value, required for SN/I authentication.
	X5c []string

	// AssertionCallback is a function provided by the application, if we're authenticating by assertion.
	AssertionCallback func(context.Context, exported.AssertionRequestOptions) (string, error)

	// TokenProvider is a function provided by the application that implements custom authentication
	// logic for a confidential client
	TokenProvider func(context.Context, exported.TokenProviderParameters) (exported.TokenProviderResult, error)
}
+
+// JWT gets the jwt assertion when the credential is not using a secret.
+func (c *Credential) JWT(ctx context.Context, authParams authority.AuthParams) (string, error) {
+ if c.AssertionCallback != nil {
+ options := exported.AssertionRequestOptions{
+ ClientID: authParams.ClientID,
+ TokenEndpoint: authParams.Endpoints.TokenEndpoint,
+ }
+ return c.AssertionCallback(ctx, options)
+ }
+
+ token := jwt.NewWithClaims(jwt.SigningMethodRS256, jwt.MapClaims{
+ "aud": authParams.Endpoints.TokenEndpoint,
+ "exp": json.Number(strconv.FormatInt(time.Now().Add(10*time.Minute).Unix(), 10)),
+ "iss": authParams.ClientID,
+ "jti": uuid.New().String(),
+ "nbf": json.Number(strconv.FormatInt(time.Now().Unix(), 10)),
+ "sub": authParams.ClientID,
+ })
+ token.Header = map[string]interface{}{
+ "alg": "RS256",
+ "typ": "JWT",
+ "x5t": base64.StdEncoding.EncodeToString(thumbprint(c.Cert)),
+ }
+
+ if authParams.SendX5C {
+ token.Header["x5c"] = c.X5c
+ }
+
+ assertion, err := token.SignedString(c.Key)
+ if err != nil {
+ return "", fmt.Errorf("unable to sign a JWT token using private key: %w", err)
+ }
+ return assertion, nil
+}
+
+// thumbprint runs the asn1.Der bytes through sha1 for use in the x5t parameter of JWT.
+// https://tools.ietf.org/html/rfc7517#section-4.8
+func thumbprint(cert *x509.Certificate) []byte {
+ /* #nosec */
+ a := sha1.Sum(cert.Raw)
+ return a[:]
+}
+
// Client represents the REST calls to get tokens from token generator backends.
type Client struct {
	// Comm provides the HTTP transport client.
	Comm urlFormCaller

	// testing is set during unit tests; its effect is not visible in this
	// file chunk — NOTE(review): confirm its use elsewhere in the package.
	testing bool
}
+
+// FromUsernamePassword uses a username and password to get an access token.
+func (c Client) FromUsernamePassword(ctx context.Context, authParameters authority.AuthParams) (TokenResponse, error) {
+ qv := url.Values{}
+ if err := addClaims(qv, authParameters); err != nil {
+ return TokenResponse{}, err
+ }
+ qv.Set(grantType, grant.Password)
+ qv.Set(username, authParameters.Username)
+ qv.Set(password, authParameters.Password)
+ qv.Set(clientID, authParameters.ClientID)
+ qv.Set(clientInfo, clientInfoVal)
+ addScopeQueryParam(qv, authParameters)
+
+ return c.doTokenResp(ctx, authParameters, qv)
+}
+
// AuthCodeRequest stores the values required to request a token from the authority using an authorization code
type AuthCodeRequest struct {
	// AuthParams carries the client/authority parameters for the exchange.
	AuthParams authority.AuthParams
	// Code is the authorization code being redeemed.
	Code string
	// CodeChallenge is the PKCE verifier sent as code_verifier (see FromAuthCode).
	CodeChallenge string
	// Credential authenticates confidential clients; nil for public clients.
	Credential *Credential
	// AppType selects public vs. confidential handling; must not be ATUnknown.
	AppType AppType
}
+
+// NewCodeChallengeRequest returns an AuthCodeRequest that uses a code challenge..
+func NewCodeChallengeRequest(params authority.AuthParams, appType AppType, cc *Credential, code, challenge string) (AuthCodeRequest, error) {
+ if appType == ATUnknown {
+ return AuthCodeRequest{}, fmt.Errorf("bug: NewCodeChallengeRequest() called with AppType == ATUnknown")
+ }
+ return AuthCodeRequest{
+ AuthParams: params,
+ AppType: appType,
+ Code: code,
+ CodeChallenge: challenge,
+ Credential: cc,
+ }, nil
+}
+
+// FromAuthCode uses an authorization code to retrieve an access token.
+func (c Client) FromAuthCode(ctx context.Context, req AuthCodeRequest) (TokenResponse, error) {
+ var qv url.Values
+
+ switch req.AppType {
+ case ATUnknown:
+ return TokenResponse{}, fmt.Errorf("bug: Token.AuthCode() received request with AppType == ATUnknown")
+ case ATConfidential:
+ var err error
+ if req.Credential == nil {
+ return TokenResponse{}, fmt.Errorf("AuthCodeRequest had nil Credential for Confidential app")
+ }
+ qv, err = prepURLVals(ctx, req.Credential, req.AuthParams)
+ if err != nil {
+ return TokenResponse{}, err
+ }
+ case ATPublic:
+ qv = url.Values{}
+ default:
+ return TokenResponse{}, fmt.Errorf("bug: Token.AuthCode() received request with AppType == %v, which we do not recongnize", req.AppType)
+ }
+
+ qv.Set(grantType, grant.AuthCode)
+ qv.Set("code", req.Code)
+ qv.Set("code_verifier", req.CodeChallenge)
+ qv.Set("redirect_uri", req.AuthParams.Redirecturi)
+ qv.Set(clientID, req.AuthParams.ClientID)
+ qv.Set(clientInfo, clientInfoVal)
+ addScopeQueryParam(qv, req.AuthParams)
+ if err := addClaims(qv, req.AuthParams); err != nil {
+ return TokenResponse{}, err
+ }
+
+ return c.doTokenResp(ctx, req.AuthParams, qv)
+}
+
+// FromRefreshToken uses a refresh token (for refreshing credentials) to get a new access token.
+func (c Client) FromRefreshToken(ctx context.Context, appType AppType, authParams authority.AuthParams, cc *Credential, refreshToken string) (TokenResponse, error) {
+ qv := url.Values{}
+ if appType == ATConfidential {
+ var err error
+ qv, err = prepURLVals(ctx, cc, authParams)
+ if err != nil {
+ return TokenResponse{}, err
+ }
+ }
+ if err := addClaims(qv, authParams); err != nil {
+ return TokenResponse{}, err
+ }
+ qv.Set(grantType, grant.RefreshToken)
+ qv.Set(clientID, authParams.ClientID)
+ qv.Set(clientInfo, clientInfoVal)
+ qv.Set("refresh_token", refreshToken)
+ addScopeQueryParam(qv, authParams)
+
+ return c.doTokenResp(ctx, authParams, qv)
+}
+
+// FromClientSecret uses a client's secret (aka password) to get a new token.
+func (c Client) FromClientSecret(ctx context.Context, authParameters authority.AuthParams, clientSecret string) (TokenResponse, error) {
+ qv := url.Values{}
+ if err := addClaims(qv, authParameters); err != nil {
+ return TokenResponse{}, err
+ }
+ qv.Set(grantType, grant.ClientCredential)
+ qv.Set("client_secret", clientSecret)
+ qv.Set(clientID, authParameters.ClientID)
+ addScopeQueryParam(qv, authParameters)
+
+ token, err := c.doTokenResp(ctx, authParameters, qv)
+ if err != nil {
+ return token, fmt.Errorf("FromClientSecret(): %w", err)
+ }
+ return token, nil
+}
+
// FromAssertion uses a client assertion (a signed JWT) to authenticate the
// client and get a new token via the client-credentials grant.
func (c Client) FromAssertion(ctx context.Context, authParameters authority.AuthParams, assertion string) (TokenResponse, error) {
	qv := url.Values{}
	if err := addClaims(qv, authParameters); err != nil {
		return TokenResponse{}, err
	}
	qv.Set(grantType, grant.ClientCredential)
	qv.Set("client_assertion_type", grant.ClientAssertion)
	qv.Set("client_assertion", assertion)
	qv.Set(clientID, authParameters.ClientID)
	qv.Set(clientInfo, clientInfoVal)
	addScopeQueryParam(qv, authParameters)

	token, err := c.doTokenResp(ctx, authParameters, qv)
	if err != nil {
		return token, fmt.Errorf("FromAssertion(): %w", err)
	}
	return token, nil
}
+
// FromUserAssertionClientSecret exchanges a user assertion for a token via
// the on-behalf-of flow, authenticating the client with its secret.
func (c Client) FromUserAssertionClientSecret(ctx context.Context, authParameters authority.AuthParams, userAssertion string, clientSecret string) (TokenResponse, error) {
	qv := url.Values{}
	if err := addClaims(qv, authParameters); err != nil {
		return TokenResponse{}, err
	}
	qv.Set(grantType, grant.JWT)
	qv.Set(clientID, authParameters.ClientID)
	qv.Set("client_secret", clientSecret)
	qv.Set("assertion", userAssertion)
	qv.Set(clientInfo, clientInfoVal)
	qv.Set("requested_token_use", "on_behalf_of")
	addScopeQueryParam(qv, authParameters)

	return c.doTokenResp(ctx, authParameters, qv)
}
+
// FromUserAssertionClientCertificate exchanges a user assertion for a token
// via the on-behalf-of flow, authenticating the client with a signed JWT
// assertion (certificate-based) rather than a secret.
func (c Client) FromUserAssertionClientCertificate(ctx context.Context, authParameters authority.AuthParams, userAssertion string, assertion string) (TokenResponse, error) {
	qv := url.Values{}
	if err := addClaims(qv, authParameters); err != nil {
		return TokenResponse{}, err
	}
	qv.Set(grantType, grant.JWT)
	qv.Set("client_assertion_type", grant.ClientAssertion)
	qv.Set("client_assertion", assertion)
	qv.Set(clientID, authParameters.ClientID)
	qv.Set("assertion", userAssertion)
	qv.Set(clientInfo, clientInfoVal)
	qv.Set("requested_token_use", "on_behalf_of")
	addScopeQueryParam(qv, authParameters)

	return c.doTokenResp(ctx, authParameters, qv)
}
+
+// DeviceCodeResult requests a new device code from the STS. The device code
+// endpoint shares the token endpoint's URL, with "token" replaced by
+// "devicecode".
+func (c Client) DeviceCodeResult(ctx context.Context, authParameters authority.AuthParams) (DeviceCodeResult, error) {
+	qv := url.Values{}
+	if err := addClaims(qv, authParameters); err != nil {
+		return DeviceCodeResult{}, err
+	}
+	qv.Set(clientID, authParameters.ClientID)
+	addScopeQueryParam(qv, authParameters)
+
+	// strings.ReplaceAll is the idiomatic form of strings.Replace(..., -1).
+	endpoint := strings.ReplaceAll(authParameters.Endpoints.TokenEndpoint, "token", "devicecode")
+
+	resp := DeviceCodeResponse{}
+	err := c.Comm.URLFormCall(ctx, endpoint, qv, &resp)
+	if err != nil {
+		return DeviceCodeResult{}, err
+	}
+
+	return resp.Convert(authParameters.ClientID, authParameters.Scopes), nil
+}
+
+// FromDeviceCodeResult redeems a previously issued device code for tokens.
+func (c Client) FromDeviceCodeResult(ctx context.Context, authParameters authority.AuthParams, deviceCodeResult DeviceCodeResult) (TokenResponse, error) {
+	params := url.Values{}
+	if err := addClaims(params, authParameters); err != nil {
+		return TokenResponse{}, err
+	}
+	params.Set(grantType, grant.DeviceCode)
+	params.Set(deviceCode, deviceCodeResult.DeviceCode)
+	params.Set(clientID, authParameters.ClientID)
+	params.Set(clientInfo, clientInfoVal)
+	addScopeQueryParam(params, authParameters)
+
+	return c.doTokenResp(ctx, authParameters, params)
+}
+
+// FromSamlGrant exchanges a SAML assertion obtained from WS-Trust for OAuth
+// tokens (used in federated username/password flows).
+func (c Client) FromSamlGrant(ctx context.Context, authParameters authority.AuthParams, samlGrant wstrust.SamlTokenInfo) (TokenResponse, error) {
+	qv := url.Values{}
+	if err := addClaims(qv, authParameters); err != nil {
+		return TokenResponse{}, err
+	}
+	qv.Set(username, authParameters.Username)
+	qv.Set(password, authParameters.Password)
+	qv.Set(clientID, authParameters.ClientID)
+	qv.Set(clientInfo, clientInfoVal)
+	// base64.StdEncoding already uses standard '=' padding, so the former
+	// WithPadding(base64.StdPadding) call was a no-op.
+	qv.Set("assertion", base64.StdEncoding.EncodeToString([]byte(samlGrant.Assertion)))
+	addScopeQueryParam(qv, authParameters)
+
+	switch samlGrant.AssertionType {
+	case grant.SAMLV1:
+		qv.Set(grantType, grant.SAMLV1)
+	case grant.SAMLV2:
+		qv.Set(grantType, grant.SAMLV2)
+	default:
+		return TokenResponse{}, fmt.Errorf("GetAccessTokenFromSamlGrant returned unknown SAML assertion type: %q", samlGrant.AssertionType)
+	}
+
+	return c.doTokenResp(ctx, authParameters, qv)
+}
+
+// doTokenResp executes the token request against the authority's token
+// endpoint, computes the final granted scopes, and validates the response.
+// Validation is skipped when the client was constructed for testing.
+func (c Client) doTokenResp(ctx context.Context, authParams authority.AuthParams, qv url.Values) (TokenResponse, error) {
+	resp := TokenResponse{}
+	// An authentication scheme (e.g. PoP) may contribute extra token request
+	// parameters; the default Bearer scheme contributes none.
+	if authParams.AuthnScheme != nil {
+		trParams := authParams.AuthnScheme.TokenRequestParams()
+		for k, v := range trParams {
+			qv.Set(k, v)
+		}
+	}
+	err := c.Comm.URLFormCall(ctx, authParams.Endpoints.TokenEndpoint, qv, &resp)
+	if err != nil {
+		return resp, err
+	}
+	// Must happen before Validate(): Validate requires scopes to be computed.
+	resp.ComputeScope(authParams)
+	if c.testing {
+		return resp, nil
+	}
+	return resp, resp.Validate()
+}
+
+// prepURLVals returns an url.Values that sets various key/values if we are doing secrets
+// or JWT assertions.
+func prepURLVals(ctx context.Context, cc *Credential, authParams authority.AuthParams) (url.Values, error) {
+	params := url.Values{}
+	// A non-empty secret means client-secret auth; otherwise fall through to
+	// a signed JWT assertion derived from the credential.
+	if cc.Secret != "" {
+		params.Set("client_secret", cc.Secret)
+		return params, nil
+	}
+
+	jwt, err := cc.JWT(ctx, authParams)
+	if err != nil {
+		return nil, err
+	}
+	params.Set("client_assertion", jwt)
+	params.Set("client_assertion_type", grant.ClientAssertion)
+	return params, nil
+}
+
+// openid required to get an id token
+// offline_access required to get a refresh token
+// profile required to get the client_info field back
+// detectDefaultScopes is used to skip default scopes a caller already passed,
+// so they are not duplicated when defaultScopes is appended.
+var detectDefaultScopes = map[string]bool{
+	"openid":         true,
+	"offline_access": true,
+	"profile":        true,
+}
+
+// defaultScopes is appended to every request's scopes (see AppendDefaultScopes).
+var defaultScopes = []string{"openid", "offline_access", "profile"}
+
+// AppendDefaultScopes copies the request's scopes and appends the MSAL default
+// scopes (openid, offline_access, profile). Entries are trimmed of surrounding
+// whitespace, blank entries are dropped, and scopes that are already defaults
+// are skipped so they aren't duplicated.
+func AppendDefaultScopes(authParameters authority.AuthParams) []string {
+	scopes := make([]string, 0, len(authParameters.Scopes)+len(defaultScopes))
+	for _, scope := range authParameters.Scopes {
+		s := strings.TrimSpace(scope)
+		if s == "" {
+			continue
+		}
+		// Compare and append the trimmed value. Previously the untrimmed
+		// scope was used here, so a padded default like " openid " was
+		// appended and then duplicated by the defaults below.
+		if detectDefaultScopes[s] {
+			continue
+		}
+		scopes = append(scopes, s)
+	}
+	return append(scopes, defaultScopes...)
+}
+
+// addClaims merges the client's capabilities with any challenge claims from
+// AuthParams and, when the result is non-empty, stores it in the "claims"
+// query parameter.
+func addClaims(v url.Values, ap authority.AuthParams) error {
+	claims, err := ap.MergeCapabilitiesAndClaims()
+	if err != nil {
+		return err
+	}
+	if claims != "" {
+		v.Set("claims", claims)
+	}
+	return nil
+}
+
+// addScopeQueryParam sets the space-delimited "scope" parameter, always
+// including the MSAL default scopes.
+func addScopeQueryParam(queryParams url.Values, authParameters authority.AuthParams) {
+	allScopes := AppendDefaultScopes(authParameters)
+	queryParams.Set("scope", strings.Join(allScopes, " "))
+}
@@ -0,0 +1,25 @@
+// Code generated by "stringer -type=AppType"; DO NOT EDIT.
+
+package accesstokens
+
+import "strconv"
+
+func _() {
+	// An "invalid array index" compiler error signifies that the constant values have changed.
+	// Re-run the stringer command to generate them again.
+	var x [1]struct{}
+	_ = x[ATUnknown-0]
+	_ = x[ATPublic-1]
+	_ = x[ATConfidential-2]
+}
+
+const _AppType_name = "ATUnknownATPublicATConfidential"
+
+var _AppType_index = [...]uint8{0, 9, 17, 31}
+
+// String returns the constant's name, or "AppType(N)" for out-of-range values.
+// NOTE(review): generated file — regenerate with stringer rather than editing.
+func (i AppType) String() string {
+	if i < 0 || i >= AppType(len(_AppType_index)-1) {
+		return "AppType(" + strconv.FormatInt(int64(i), 10) + ")"
+	}
+	return _AppType_name[_AppType_index[i]:_AppType_index[i+1]]
+}
@@ -0,0 +1,339 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package accesstokens
+
+import (
+ "bytes"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "reflect"
+ "strings"
+ "time"
+
+ internalTime "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json/types/time"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/shared"
+)
+
+// IDToken consists of all the information used to validate a user.
+// https://docs.microsoft.com/azure/active-directory/develop/id-tokens .
+type IDToken struct {
+	PreferredUsername string `json:"preferred_username,omitempty"`
+	GivenName         string `json:"given_name,omitempty"`
+	FamilyName        string `json:"family_name,omitempty"`
+	MiddleName        string `json:"middle_name,omitempty"`
+	Name              string `json:"name,omitempty"`
+	Oid               string `json:"oid,omitempty"`
+	TenantID          string `json:"tid,omitempty"`
+	Subject           string `json:"sub,omitempty"`
+	UPN               string `json:"upn,omitempty"`
+	Email             string `json:"email,omitempty"`
+	AlternativeID     string `json:"alternative_id,omitempty"`
+	Issuer            string `json:"iss,omitempty"`
+	Audience          string `json:"aud,omitempty"`
+	ExpirationTime    int64  `json:"exp,omitempty"`
+	IssuedAt          int64  `json:"iat,omitempty"`
+	NotBefore         int64  `json:"nbf,omitempty"`
+	// RawToken is the compact JWT as received from the wire (not a JSON field).
+	RawToken string
+
+	// AdditionalFields captures claims not represented above.
+	AdditionalFields map[string]interface{}
+}
+
+// null is the JSON literal used to detect "no value" in UnmarshalJSON.
+var null = []byte("null")
+
+// UnmarshalJSON implements json.Unmarshaler. The wire value is a quoted JWT
+// string, not a JSON object: the payload (second dot-separated segment) is
+// base64url-decoded and unmarshaled into the struct, and the original compact
+// JWT is preserved in RawToken.
+func (i *IDToken) UnmarshalJSON(b []byte) error {
+	if bytes.Equal(null, b) {
+		return nil
+	}
+
+	// Because we have a custom unmarshaler, you
+	// cannot directly call json.Unmarshal here. If you do, it will call this function
+	// recursively until reach our recursion limit. We have to create a new type
+	// that doesn't have this method in order to use json.Unmarshal.
+	type idToken2 IDToken
+
+	jwt := strings.Trim(string(b), `"`)
+	jwtArr := strings.Split(jwt, ".")
+	// A JWT has at least header and payload segments; only the payload is used.
+	if len(jwtArr) < 2 {
+		return errors.New("IDToken returned from server is invalid")
+	}
+
+	jwtPart := jwtArr[1]
+	jwtDecoded, err := decodeJWT(jwtPart)
+	if err != nil {
+		return fmt.Errorf("unable to unmarshal IDToken, problem decoding JWT: %w", err)
+	}
+
+	token := idToken2{}
+	err = json.Unmarshal(jwtDecoded, &token)
+	if err != nil {
+		return fmt.Errorf("unable to unmarshal IDToken: %w", err)
+	}
+	token.RawToken = jwt
+
+	*i = IDToken(token)
+	return nil
+}
+
+// IsZero indicates if the IDToken is the zero value. Non-nil but empty maps
+// and slices are treated as zero.
+func (i IDToken) IsZero() bool {
+	v := reflect.ValueOf(i)
+	for idx := 0; idx < v.NumField(); idx++ {
+		field := v.Field(idx)
+		if field.IsZero() {
+			continue
+		}
+		if k := field.Kind(); (k == reflect.Map || k == reflect.Slice) && field.Len() == 0 {
+			continue
+		}
+		return false
+	}
+	return true
+}
+
+// LocalAccountID extracts an account's local account ID from an ID token.
+// The "oid" claim is preferred; "sub" is the fallback when oid is absent.
+func (i IDToken) LocalAccountID() string {
+	if i.Oid != "" {
+		return i.Oid
+	}
+	return i.Subject
+}
+
+// jwtDecoder is provided to allow tests to provide their own.
+var jwtDecoder = decodeJWT
+
+// ClientInfo is used to create a Home Account ID for an account.
+type ClientInfo struct {
+	// UID is the user's object ID within the tenant.
+	UID string `json:"uid"`
+	// UTID is the tenant ID.
+	UTID string `json:"utid"`
+
+	AdditionalFields map[string]interface{}
+}
+
+// UnmarshalJSON implements json.Unmarshaler. The client_info wire value is a
+// base64url-encoded JSON object, so it is decoded before unmarshaling.
+func (c *ClientInfo) UnmarshalJSON(b []byte) error {
+	s := strings.Trim(string(b), `"`)
+	// Client info may be empty in some flows, e.g. certificate exchange.
+	if len(s) == 0 {
+		return nil
+	}
+
+	// Because we have a custom unmarshaler, you
+	// cannot directly call json.Unmarshal here. If you do, it will call this function
+	// recursively until reach our recursion limit. We have to create a new type
+	// that doesn't have this method in order to use json.Unmarshal.
+	type clientInfo2 ClientInfo
+
+	raw, err := jwtDecoder(s)
+	if err != nil {
+		return fmt.Errorf("TokenResponse client_info field had JWT decode error: %w", err)
+	}
+
+	var c2 clientInfo2
+
+	err = json.Unmarshal(raw, &c2)
+	if err != nil {
+		// Fixed typo: the message previously said "TokenRespone".
+		return fmt.Errorf("was unable to unmarshal decoded JWT in TokenResponse to ClientInfo: %w", err)
+	}
+
+	*c = ClientInfo(c2)
+	return nil
+}
+
+// Scopes represents scopes in a TokenResponse.
+type Scopes struct {
+	Slice []string
+}
+
+// UnmarshalJSON implements json.Unmarshal. The wire format is a quoted,
+// space-delimited scope list; an empty value leaves Slice nil.
+func (s *Scopes) UnmarshalJSON(b []byte) error {
+	trimmed := strings.Trim(string(b), `"`)
+	if trimmed != "" {
+		s.Slice = strings.Split(trimmed, " ")
+	}
+	return nil
+}
+
+// TokenResponse is the information that is returned from a token endpoint during a token acquisition flow.
+type TokenResponse struct {
+	authority.OAuthResponseBase
+
+	AccessToken  string `json:"access_token"`
+	RefreshToken string `json:"refresh_token"`
+	TokenType    string `json:"token_type"`
+
+	// FamilyID ("foci") marks tokens shareable across a family of client IDs.
+	FamilyID       string                    `json:"foci"`
+	IDToken        IDToken                   `json:"id_token"`
+	ClientInfo     ClientInfo                `json:"client_info"`
+	ExpiresOn      internalTime.DurationTime `json:"expires_in"`
+	ExtExpiresOn   internalTime.DurationTime `json:"ext_expires_in"`
+	GrantedScopes  Scopes                    `json:"scope"`
+	DeclinedScopes []string                  // This is derived
+
+	AdditionalFields map[string]interface{}
+
+	// scopesComputed records that ComputeScope ran; checked by Validate.
+	scopesComputed bool
+}
+
+// ComputeScope computes the final scopes based on what was granted by the
+// server and what our AuthParams requested. Per the OAuth spec, an empty
+// scope list in the response means all requested scopes were granted
+// (https://tools.ietf.org/html/rfc6749#section-3.3); this shows up in client
+// assertion flows but can happen at any time, so treat it specially here.
+func (tr *TokenResponse) ComputeScope(authParams authority.AuthParams) {
+	if len(tr.GrantedScopes.Slice) > 0 {
+		tr.DeclinedScopes = findDeclinedScopes(authParams.Scopes, tr.GrantedScopes.Slice)
+	} else {
+		tr.GrantedScopes = Scopes{Slice: authParams.Scopes}
+	}
+	tr.scopesComputed = true
+}
+
+// HomeAccountID uniquely identifies the authenticated account, if any. It's "" when the token is an app token.
+func (tr *TokenResponse) HomeAccountID() string {
+	uid := tr.ClientInfo.UID
+	if uid == "" {
+		// No client info: fall back to the ID token subject (may be "").
+		return tr.IDToken.Subject
+	}
+	utid := tr.ClientInfo.UTID
+	if utid == "" {
+		utid = uid
+	}
+	return uid + "." + utid
+}
+
+// Validate validates the TokenResponse has basic valid values. It must be
+// called after ComputeScope() has run.
+func (tr *TokenResponse) Validate() error {
+	switch {
+	case tr.Error != "":
+		return fmt.Errorf("%s: %s", tr.Error, tr.ErrorDescription)
+	case tr.AccessToken == "":
+		return errors.New("response is missing access_token")
+	case !tr.scopesComputed:
+		return fmt.Errorf("TokenResponse hasn't had ScopesComputed() called")
+	}
+	return nil
+}
+
+// CacheKey returns the key under which this response should be cached, which
+// depends on the authorization flow that produced it.
+func (tr *TokenResponse) CacheKey(authParams authority.AuthParams) string {
+	switch {
+	case authParams.AuthorizationType == authority.ATOnBehalfOf:
+		return authParams.AssertionHash()
+	case authParams.AuthorizationType == authority.ATClientCredentials:
+		return authParams.AppKey()
+	case authParams.IsConfidentialClient, authParams.AuthorizationType == authority.ATRefreshToken:
+		return tr.HomeAccountID()
+	}
+	return ""
+}
+
+// findDeclinedScopes returns the requested scopes that are absent from the
+// granted scopes, compared case-insensitively.
+func findDeclinedScopes(requestedScopes []string, grantedScopes []string) []string {
+	granted := make(map[string]bool, len(grantedScopes))
+	for _, g := range grantedScopes {
+		granted[strings.ToLower(g)] = true
+	}
+	declined := []string{}
+	for _, want := range requestedScopes {
+		if granted[strings.ToLower(want)] {
+			continue
+		}
+		declined = append(declined, want)
+	}
+	return declined
+}
+
+// decodeJWT decodes a JWT and converts it to a byte array representing a JSON object
+// JWT has headers and payload base64url encoded without padding
+// https://tools.ietf.org/html/rfc7519#section-3 and
+// https://tools.ietf.org/html/rfc7515#section-2
+func decodeJWT(data string) ([]byte, error) {
+	// https://tools.ietf.org/html/rfc7515#appendix-C
+	// RawURLEncoding == base64url without padding, matching JWS segments.
+	return base64.RawURLEncoding.DecodeString(data)
+}
+
+// RefreshToken is the JSON representation of a MSAL refresh token for encoding to storage.
+type RefreshToken struct {
+	HomeAccountID     string `json:"home_account_id,omitempty"`
+	Environment       string `json:"environment,omitempty"`
+	CredentialType    string `json:"credential_type,omitempty"`
+	ClientID          string `json:"client_id,omitempty"`
+	FamilyID          string `json:"family_id,omitempty"`
+	// Secret holds the refresh token itself.
+	Secret            string `json:"secret,omitempty"`
+	Realm             string `json:"realm,omitempty"`
+	Target            string `json:"target,omitempty"`
+	UserAssertionHash string `json:"user_assertion_hash,omitempty"`
+
+	AdditionalFields map[string]interface{}
+}
+
+// NewRefreshToken is the constructor for RefreshToken.
+func NewRefreshToken(homeID, env, clientID, refreshToken, familyID string) RefreshToken {
+	return RefreshToken{
+		HomeAccountID:  homeID,
+		Environment:    env,
+		CredentialType: "RefreshToken",
+		ClientID:       clientID,
+		FamilyID:       familyID,
+		Secret:         refreshToken,
+	}
+}
+
+// Key outputs the key that can be used to uniquely look up this entry in a map.
+func (rt RefreshToken) Key() string {
+ var fourth = rt.FamilyID
+ if fourth == "" {
+ fourth = rt.ClientID
+ }
+
+ key := strings.Join(
+ []string{rt.HomeAccountID, rt.Environment, rt.CredentialType, fourth},
+ shared.CacheKeySeparator,
+ )
+ return strings.ToLower(key)
+}
+
+// GetSecret returns the refresh token value itself.
+func (rt RefreshToken) GetSecret() string {
+	return rt.Secret
+}
+
+// DeviceCodeResult stores the response from the STS device code endpoint.
+type DeviceCodeResult struct {
+	// UserCode is the code the user needs to provide when authenticating at the verification URI.
+	UserCode string
+	// DeviceCode is the code used in the access token request.
+	DeviceCode string
+	// VerificationURL is the URL where the user can authenticate.
+	VerificationURL string
+	// ExpiresOn is the expiration time of device code in seconds.
+	ExpiresOn time.Time
+	// Interval is the interval at which the STS should be polled at.
+	Interval int
+	// Message is the message which should be displayed to the user.
+	Message string
+	// ClientID is the UUID issued by the authorization server for your application.
+	ClientID string
+	// Scopes is the OpenID scopes used to request access a protected API.
+	Scopes []string
+}
+
+// NewDeviceCodeResult creates a DeviceCodeResult instance.
+func NewDeviceCodeResult(userCode, deviceCode, verificationURL string, expiresOn time.Time, interval int, message, clientID string, scopes []string) DeviceCodeResult {
+	return DeviceCodeResult{
+		UserCode:        userCode,
+		DeviceCode:      deviceCode,
+		VerificationURL: verificationURL,
+		ExpiresOn:       expiresOn,
+		Interval:        interval,
+		Message:         message,
+		ClientID:        clientID,
+		Scopes:          scopes,
+	}
+}
+
+// String renders the user-visible fields for display/debugging.
+func (dcr DeviceCodeResult) String() string {
+	return fmt.Sprintf("UserCode: (%v)\nDeviceCode: (%v)\nURL: (%v)\nMessage: (%v)\n",
+		dcr.UserCode, dcr.DeviceCode, dcr.VerificationURL, dcr.Message)
+}
@@ -0,0 +1,589 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package authority
+
+import (
+ "context"
+ "crypto/sha256"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "os"
+ "path"
+ "strings"
+ "time"
+
+ "github.com/google/uuid"
+)
+
+const (
+	authorizationEndpoint             = "https://%v/%v/oauth2/v2.0/authorize"
+	instanceDiscoveryEndpoint         = "https://%v/common/discovery/instance"
+	tenantDiscoveryEndpointWithRegion = "https://%s.%s/%s/v2.0/.well-known/openid-configuration"
+	// regionName is the environment variable consulted for Azure region detection.
+	regionName        = "REGION_NAME"
+	defaultAPIVersion = "2021-10-01"
+	// imdsEndpoint is the Azure Instance Metadata Service URL used to auto-detect the region.
+	imdsEndpoint          = "http://169.254.169.254/metadata/instance/compute/location?format=text&api-version=" + defaultAPIVersion
+	autoDetectRegion      = "TryAutoDetect"
+	AccessTokenTypeBearer = "Bearer"
+)
+
+// These are various hosts that host AAD Instance discovery endpoints.
+const (
+	defaultHost          = "login.microsoftonline.com"
+	loginMicrosoft       = "login.microsoft.com"
+	loginWindows         = "login.windows.net"
+	loginSTSWindows      = "sts.windows.net"
+	loginMicrosoftOnline = defaultHost
+)
+
+// jsonCaller is an interface that allows us to mock the JSONCall method.
+// In production it is satisfied by *comm.Client.
+type jsonCaller interface {
+	JSONCall(ctx context.Context, endpoint string, headers http.Header, qv url.Values, body, resp interface{}) error
+}
+
+// aadTrustedHostList enumerates the AAD hosts that are trusted without
+// instance discovery validation.
+var aadTrustedHostList = map[string]bool{
+	"login.windows.net":            true, // Microsoft Azure Worldwide - Used in validation scenarios where host is not this list
+	"login.chinacloudapi.cn":       true, // Microsoft Azure China
+	"login.microsoftonline.de":     true, // Microsoft Azure Blackforest
+	"login-us.microsoftonline.com": true, // Microsoft Azure US Government - Legacy
+	"login.microsoftonline.us":     true, // Microsoft Azure US Government
+	"login.microsoftonline.com":    true, // Microsoft Azure Worldwide
+	"login.cloudgovapi.us":         true, // Microsoft Azure US Government
+}
+
+// TrustedHost checks if an AAD host is trusted/valid. A missing key yields
+// the zero value false, so the map lookup alone is sufficient.
+func TrustedHost(host string) bool {
+	return aadTrustedHostList[host]
+}
+
+// OAuthResponseBase is the base JSON return message for an OAuth call.
+// This is embedded in other calls to get the base fields from every response.
+type OAuthResponseBase struct {
+	Error            string `json:"error"`
+	SubError         string `json:"suberror"`
+	ErrorDescription string `json:"error_description"`
+	ErrorCodes       []int  `json:"error_codes"`
+	CorrelationID    string `json:"correlation_id"`
+	Claims           string `json:"claims"`
+}
+
+// TenantDiscoveryResponse is the tenant endpoints from the OpenID configuration endpoint.
+type TenantDiscoveryResponse struct {
+	OAuthResponseBase
+
+	AuthorizationEndpoint string `json:"authorization_endpoint"`
+	TokenEndpoint         string `json:"token_endpoint"`
+	Issuer                string `json:"issuer"`
+
+	// AdditionalFields captures any properties not listed above.
+	AdditionalFields map[string]interface{}
+}
+
+// Validate validates that the response had the correct values required.
+func (r *TenantDiscoveryResponse) Validate() error {
+	if r.AuthorizationEndpoint == "" {
+		return errors.New("TenantDiscoveryResponse: authorize endpoint was not found in the openid configuration")
+	}
+	if r.TokenEndpoint == "" {
+		return errors.New("TenantDiscoveryResponse: token endpoint was not found in the openid configuration")
+	}
+	if r.Issuer == "" {
+		return errors.New("TenantDiscoveryResponse: issuer was not found in the openid configuration")
+	}
+	return nil
+}
+
+// InstanceDiscoveryMetadata describes one cloud instance entry returned by
+// AAD instance discovery.
+type InstanceDiscoveryMetadata struct {
+	PreferredNetwork string   `json:"preferred_network"`
+	PreferredCache   string   `json:"preferred_cache"`
+	Aliases          []string `json:"aliases"`
+
+	AdditionalFields map[string]interface{}
+}
+
+// InstanceDiscoveryResponse is the payload of the AAD instance discovery endpoint.
+type InstanceDiscoveryResponse struct {
+	TenantDiscoveryEndpoint string                      `json:"tenant_discovery_endpoint"`
+	Metadata                []InstanceDiscoveryMetadata `json:"metadata"`
+
+	AdditionalFields map[string]interface{}
+}
+
+//go:generate stringer -type=AuthorizeType
+
+// AuthorizeType represents the type of token flow.
+type AuthorizeType int
+
+// These are all the types of token flows.
+const (
+	ATUnknown AuthorizeType = iota
+	ATUsernamePassword
+	ATWindowsIntegrated
+	ATAuthCode
+	ATInteractive
+	ATClientCredentials
+	ATDeviceCode
+	ATRefreshToken
+	AccountByID
+	ATOnBehalfOf
+)
+
+// These are all authority types
+const (
+	AAD  = "MSSTS"
+	ADFS = "ADFS"
+)
+
+// AuthenticationScheme is an extensibility mechanism designed to be used only by Azure Arc for proof of possession access tokens.
+type AuthenticationScheme interface {
+	// Extra parameters that are added to the request to the /token endpoint.
+	TokenRequestParams() map[string]string
+	// Key ID of the public / private key pair used by the encryption algorithm, if any.
+	// Tokens obtained by authentication schemes that use this are bound to the KeyId, i.e.
+	// if a different kid is presented, the access token cannot be used.
+	KeyID() string
+	// Creates the access token that goes into an Authorization HTTP header.
+	FormatAccessToken(accessToken string) (string, error)
+	// Expected to match the token_type parameter returned by ESTS. Used to disambiguate
+	// between ATs of different types (e.g. Bearer and PoP) when loading from cache etc.
+	AccessTokenType() string
+}
+
+// BearerAuthenticationScheme is the default authn scheme, realizing
+// AuthenticationScheme for plain "Bearer" tokens.
+type BearerAuthenticationScheme struct{}
+
+// bearerAuthnScheme is the shared default instance used by NewAuthParams.
+var bearerAuthnScheme BearerAuthenticationScheme
+
+// TokenRequestParams returns nil: Bearer adds no extra token request parameters.
+func (ba *BearerAuthenticationScheme) TokenRequestParams() map[string]string {
+	return nil
+}
+
+// KeyID returns "": Bearer tokens are not bound to a key.
+func (ba *BearerAuthenticationScheme) KeyID() string {
+	return ""
+}
+
+// FormatAccessToken returns the token unchanged.
+func (ba *BearerAuthenticationScheme) FormatAccessToken(accessToken string) (string, error) {
+	return accessToken, nil
+}
+
+// AccessTokenType reports the "Bearer" token type.
+func (ba *BearerAuthenticationScheme) AccessTokenType() string {
+	return AccessTokenTypeBearer
+}
+
+// AuthParams represents the parameters used for authorization for token acquisition.
+type AuthParams struct {
+	// AuthorityInfo describes the authority (host, tenant, type) to call.
+	AuthorityInfo Info
+	// CorrelationID is sent with requests to correlate client/server logs.
+	CorrelationID string
+	// Endpoints are the discovered authorization/token endpoints for the authority.
+	Endpoints Endpoints
+	// ClientID is the application (client) ID registered with the authority.
+	ClientID string
+	// Redirecturi is used for auth flows that specify a redirect URI (e.g. local server for interactive auth flow).
+	Redirecturi   string
+	// HomeAccountID identifies the account in the cache ("uid.utid").
+	HomeAccountID string
+	// Username is the user-name portion for username/password auth flow.
+	Username string
+	// Password is the password portion for username/password auth flow.
+	Password string
+	// Scopes is the list of scopes the user consents to.
+	Scopes []string
+	// AuthorizationType specifies the auth flow being used.
+	AuthorizationType AuthorizeType
+	// State is a random value used to prevent cross-site request forgery attacks.
+	State string
+	// CodeChallenge is derived from a code verifier and is sent in the auth request.
+	CodeChallenge string
+	// CodeChallengeMethod describes the method used to create the CodeChallenge.
+	CodeChallengeMethod string
+	// Prompt specifies the user prompt type during interactive auth.
+	Prompt string
+	// IsConfidentialClient specifies if it is a confidential client.
+	IsConfidentialClient bool
+	// SendX5C specifies if x5c claim(public key of the certificate) should be sent to STS.
+	SendX5C bool
+	// UserAssertion is the access token used to acquire token on behalf of user
+	UserAssertion string
+	// Capabilities the client will include with each token request, for example "CP1".
+	// Call [NewClientCapabilities] to construct a value for this field.
+	Capabilities ClientCapabilities
+	// Claims required for an access token to satisfy a conditional access policy
+	Claims string
+	// KnownAuthorityHosts don't require metadata discovery because they're known to the user
+	KnownAuthorityHosts []string
+	// LoginHint is a username with which to pre-populate account selection during interactive auth
+	LoginHint string
+	// DomainHint is a directive that can be used to accelerate the user to their federated IdP sign-in page
+	DomainHint string
+	// AuthnScheme is an optional scheme for formatting access tokens
+	AuthnScheme AuthenticationScheme
+}
+
+// NewAuthParams creates an authorization parameters object with a fresh
+// correlation ID and the default Bearer authentication scheme.
+func NewAuthParams(clientID string, authorityInfo Info) AuthParams {
+	return AuthParams{
+		ClientID:      clientID,
+		AuthorityInfo: authorityInfo,
+		CorrelationID: uuid.New().String(),
+		AuthnScheme:   &bearerAuthnScheme,
+	}
+}
+
+// WithTenant returns a copy of the AuthParams having the specified tenant ID. If the given
+// ID is empty, the copy is identical to the original. This function returns an error in
+// several cases:
+//   - ID isn't specific (for example, it's "common")
+//   - ID is non-empty and the authority doesn't support tenants (for example, it's an ADFS authority)
+//   - the client is configured to authenticate only Microsoft accounts via the "consumers" endpoint
+//   - the resulting authority URL is invalid
+func (p AuthParams) WithTenant(ID string) (AuthParams, error) {
+	switch ID {
+	case "", p.AuthorityInfo.Tenant:
+		// keep the default tenant because the caller didn't override it
+		return p, nil
+	case "common", "consumers", "organizations":
+		if p.AuthorityInfo.AuthorityType == AAD {
+			return p, fmt.Errorf(`tenant ID must be a specific tenant, not "%s"`, ID)
+		}
+		// else we'll return a better error below
+	}
+	if p.AuthorityInfo.AuthorityType != AAD {
+		return p, errors.New("the authority doesn't support tenants")
+	}
+	if p.AuthorityInfo.Tenant == "consumers" {
+		return p, errors.New(`client is configured to authenticate only personal Microsoft accounts, via the "consumers" endpoint`)
+	}
+	// Rebuild the authority URL with the requested tenant in the path.
+	authority := "https://" + path.Join(p.AuthorityInfo.Host, ID)
+	info, err := NewInfoFromAuthorityURI(authority, p.AuthorityInfo.ValidateAuthority, p.AuthorityInfo.InstanceDiscoveryDisabled)
+	if err == nil {
+		// Region isn't carried by the URL; preserve it from the original info.
+		info.Region = p.AuthorityInfo.Region
+		p.AuthorityInfo = info
+	}
+	return p, err
+}
+
+// MergeCapabilitiesAndClaims combines client capabilities and challenge claims into a value suitable for an authentication request's "claims" parameter.
+func (p AuthParams) MergeCapabilitiesAndClaims() (string, error) {
+	claims := p.Claims
+	if len(p.Capabilities.asMap) > 0 {
+		if claims == "" {
+			// without claims the result is simply the capabilities
+			return p.Capabilities.asJSON, nil
+		}
+		// Otherwise, merge claims and capabilities into a single JSON object.
+		// We handle the claims challenge as a map because we don't know its structure.
+		var challenge map[string]any
+		if err := json.Unmarshal([]byte(claims), &challenge); err != nil {
+			return "", fmt.Errorf(`claims must be JSON. Are they base64 encoded? json.Unmarshal returned "%v"`, err)
+		}
+		// Capabilities are merged into the challenge without overwriting it.
+		if err := merge(p.Capabilities.asMap, challenge); err != nil {
+			return "", err
+		}
+		b, err := json.Marshal(challenge)
+		if err != nil {
+			return "", err
+		}
+		claims = string(b)
+	}
+	return claims, nil
+}
+
+// merge merges a into b without overwriting b's values. Returns an error when
+// a and b share a key for which either has a non-object value. Errors
+// shouldn't occur in practice because the challenge will be from AAD, which
+// knows the capabilities format.
+func merge(a, b map[string]any) error {
+	for k, av := range a {
+		bv, ok := b[k]
+		if !ok {
+			// b doesn't contain this key => simply set it to a's value
+			b[k] = av
+			continue
+		}
+		// b does contain this key => recursively merge a[k] into b[k], provided
+		// both are maps. If either isn't a map, merging would overwrite some
+		// value in b, so report a conflict.
+		A, aIsMap := av.(map[string]any)
+		B, bIsMap := bv.(map[string]any)
+		if !aIsMap || !bIsMap {
+			return errors.New("challenge claims conflict with client capabilities")
+		}
+		// BUG FIX: previously this was "return merge(A, B)", which exited the
+		// loop after the first shared nested-map key and silently skipped
+		// merging the remaining keys of a.
+		if err := merge(A, B); err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+// ClientCapabilities stores capabilities in the formats used by AuthParams.MergeCapabilitiesAndClaims.
+// [NewClientCapabilities] precomputes these representations because capabilities are static for the
+// lifetime of a client and are included with every authentication request i.e., these computations
+// always have the same result and would otherwise have to be repeated for every request.
+type ClientCapabilities struct {
+	// asJSON is for the common case: adding the capabilities to an auth request with no challenge claims
+	asJSON string
+	// asMap is for merging the capabilities with challenge claims
+	asMap map[string]any
+}
+
+// NewClientCapabilities precomputes the JSON and map forms of the given
+// capabilities. The capability strings are encoded with json.Marshal so
+// characters such as quotes are escaped rather than corrupting the document
+// (previously they were interpolated verbatim).
+func NewClientCapabilities(capabilities []string) (ClientCapabilities, error) {
+	c := ClientCapabilities{}
+	if len(capabilities) == 0 {
+		return c, nil
+	}
+	values, err := json.Marshal(capabilities)
+	if err != nil {
+		return c, err
+	}
+	c.asJSON = `{"access_token":{"xms_cc":{"values":` + string(values) + `}}}`
+	return c, json.Unmarshal([]byte(c.asJSON), &c.asMap)
+}
+
+// Info consists of information about the authority.
+type Info struct {
+	// Host is the authority host, including any port (e.g. "login.microsoftonline.com").
+	Host                      string
+	CanonicalAuthorityURI     string
+	// AuthorityType is AAD ("MSSTS") or ADFS.
+	AuthorityType             string
+	UserRealmURIPrefix        string
+	ValidateAuthority         bool
+	// Tenant is the first path segment of the authority URL.
+	Tenant                    string
+	Region                    string
+	InstanceDiscoveryDisabled bool
+}
+
+// firstPathSegment returns the first path segment of u (the tenant portion
+// of an authority URL), or an error when the URL has no path.
+func firstPathSegment(u *url.URL) (string, error) {
+	segments := strings.Split(u.EscapedPath(), "/")
+	if len(segments) < 2 {
+		return "", errors.New(`authority must be an https URL such as "https://login.microsoftonline.com/<your tenant>"`)
+	}
+	return segments[1], nil
+}
+
+// NewInfoFromAuthorityURI creates an AuthorityInfo instance from the authority URL provided.
+// The URL must be https; its first path segment is the tenant. A tenant of
+// "adfs" selects the ADFS authority type, anything else is treated as AAD.
+func NewInfoFromAuthorityURI(authority string, validateAuthority bool, instanceDiscoveryDisabled bool) (Info, error) {
+	// Lowercased before parsing so host/tenant comparisons are canonical.
+	u, err := url.Parse(strings.ToLower(authority))
+	if err != nil || u.Scheme != "https" {
+		return Info{}, errors.New(`authority must be an https URL such as "https://login.microsoftonline.com/<your tenant>"`)
+	}
+
+	tenant, err := firstPathSegment(u)
+	if err != nil {
+		return Info{}, err
+	}
+	authorityType := AAD
+	if tenant == "adfs" {
+		authorityType = ADFS
+	}
+
+	// u.Host includes the port, if any, which is required for private cloud deployments
+	return Info{
+		Host:                      u.Host,
+		CanonicalAuthorityURI:     fmt.Sprintf("https://%v/%v/", u.Host, tenant),
+		AuthorityType:             authorityType,
+		UserRealmURIPrefix:        fmt.Sprintf("https://%v/common/userrealm/", u.Hostname()),
+		ValidateAuthority:         validateAuthority,
+		Tenant:                    tenant,
+		InstanceDiscoveryDisabled: instanceDiscoveryDisabled,
+	}, nil
+}
+
+// Endpoints consists of the endpoints from the tenant discovery response.
+type Endpoints struct {
+	AuthorizationEndpoint string
+	TokenEndpoint         string
+	// selfSignedJwtAudience is the issuer used as the audience of self-signed client assertions.
+	selfSignedJwtAudience string
+	// authorityHost is the bare authority host, used e.g. for user realm lookups.
+	authorityHost string
+}
+
+// NewEndpoints creates an Endpoints object.
+func NewEndpoints(authorizationEndpoint string, tokenEndpoint string, selfSignedJwtAudience string, authorityHost string) Endpoints {
+	return Endpoints{authorizationEndpoint, tokenEndpoint, selfSignedJwtAudience, authorityHost}
+}
+
+// UserRealmAccountType refers to the type of user realm.
+type UserRealmAccountType string
+
+// These are the different types of user realms.
+const (
+	Unknown   UserRealmAccountType = ""
+	Federated UserRealmAccountType = "Federated"
+	Managed   UserRealmAccountType = "Managed"
+)
+
+// UserRealm is used for the username password request to determine user type
+type UserRealm struct {
+	AccountType       UserRealmAccountType `json:"account_type"`
+	DomainName        string               `json:"domain_name"`
+	CloudInstanceName string               `json:"cloud_instance_name"`
+	CloudAudienceURN  string               `json:"cloud_audience_urn"`
+
+	// required if accountType is Federated
+	FederationProtocol    string `json:"federation_protocol"`
+	FederationMetadataURL string `json:"federation_metadata_url"`
+
+	AdditionalFields map[string]interface{}
+}
+
+// validate reports an error for the first mandatory UserRealm field that is
+// empty. The federation fields are only required for Federated accounts.
+// Check order matches the service contract: account type, domain, cloud
+// instance, audience URN, then (Federated only) protocol and metadata URL.
+func (u UserRealm) validate() error {
+	if string(u.AccountType) == "" {
+		return errors.New("the account type (Federated or Managed) is missing")
+	}
+	if u.DomainName == "" {
+		return errors.New("domain name of user realm is missing")
+	}
+	if u.CloudInstanceName == "" {
+		return errors.New("cloud instance name of user realm is missing")
+	}
+	if u.CloudAudienceURN == "" {
+		return errors.New("cloud Instance URN is missing")
+	}
+	if u.AccountType != Federated {
+		return nil
+	}
+	if u.FederationProtocol == "" {
+		return errors.New("federation protocol of user realm is missing")
+	}
+	if u.FederationMetadataURL == "" {
+		return errors.New("federation metadata URL of user realm is missing")
+	}
+	return nil
+}
+
+// Client represents the REST calls to authority backends.
+type Client struct {
+	// Comm provides the HTTP transport client.
+	// It is the jsonCaller interface so tests can substitute a fake; in
+	// production it is a *comm.Client.
+	Comm jsonCaller // *comm.Client
+}
+
+// UserRealm fetches the user realm for authParams.Username from the authority's
+// /common/UserRealm endpoint and validates that the mandatory fields are set.
+// The username is path-escaped before being placed in the URL.
+func (c Client) UserRealm(ctx context.Context, authParams AuthParams) (UserRealm, error) {
+	endpoint := fmt.Sprintf("https://%s/common/UserRealm/%s", authParams.Endpoints.authorityHost, url.PathEscape(authParams.Username))
+	qv := url.Values{
+		"api-version": []string{"1.0"},
+	}
+
+	resp := UserRealm{}
+	err := c.Comm.JSONCall(
+		ctx,
+		endpoint,
+		http.Header{"client-request-id": []string{authParams.CorrelationID}},
+		qv,
+		nil,
+		&resp,
+	)
+	if err != nil {
+		return resp, err
+	}
+
+	// A transport-level success can still be an unusable realm; validate().
+	return resp, resp.validate()
+}
+
+// GetTenantDiscoveryResponse retrieves the OpenID configuration document from
+// the given endpoint. No validation is performed here; callers inspect the
+// returned TenantDiscoveryResponse themselves.
+func (c Client) GetTenantDiscoveryResponse(ctx context.Context, openIDConfigurationEndpoint string) (TenantDiscoveryResponse, error) {
+	resp := TenantDiscoveryResponse{}
+	err := c.Comm.JSONCall(
+		ctx,
+		openIDConfigurationEndpoint,
+		http.Header{},
+		nil,
+		nil,
+		&resp,
+	)
+
+	return resp, err
+}
+
+// AADInstanceDiscovery attempts to discover a tenant endpoint (used in OIDC auth with an authorization endpoint).
+// This is done by AAD which allows for aliasing of tenants (windows.sts.net is the same as login.windows.com).
+//
+// When a region is configured (or auto-detected), the response is synthesized
+// locally without a network call; otherwise the instance discovery endpoint is
+// queried over HTTP.
+func (c Client) AADInstanceDiscovery(ctx context.Context, authorityInfo Info) (InstanceDiscoveryResponse, error) {
+	region := ""
+	var err error
+	resp := InstanceDiscoveryResponse{}
+	// An explicit region wins; the autoDetectRegion sentinel triggers an IMDS probe.
+	if authorityInfo.Region != "" && authorityInfo.Region != autoDetectRegion {
+		region = authorityInfo.Region
+	} else if authorityInfo.Region == autoDetectRegion {
+		region = detectRegion(ctx)
+	}
+	if region != "" {
+		environment := authorityInfo.Host
+		// Well-known public-cloud hosts collapse to the canonical login host.
+		switch environment {
+		case loginMicrosoft, loginWindows, loginSTSWindows, defaultHost:
+			environment = loginMicrosoft
+		}
+
+		// Build the regional discovery response locally — no HTTP call needed.
+		resp.TenantDiscoveryEndpoint = fmt.Sprintf(tenantDiscoveryEndpointWithRegion, region, environment, authorityInfo.Tenant)
+		metadata := InstanceDiscoveryMetadata{
+			PreferredNetwork: fmt.Sprintf("%v.%v", region, authorityInfo.Host),
+			PreferredCache:   authorityInfo.Host,
+			Aliases:          []string{fmt.Sprintf("%v.%v", region, authorityInfo.Host), authorityInfo.Host},
+		}
+		resp.Metadata = []InstanceDiscoveryMetadata{metadata}
+	} else {
+		qv := url.Values{}
+		qv.Set("api-version", "1.1")
+		qv.Set("authorization_endpoint", fmt.Sprintf(authorizationEndpoint, authorityInfo.Host, authorityInfo.Tenant))
+
+		// Untrusted hosts are resolved through the default discovery host.
+		discoveryHost := defaultHost
+		if TrustedHost(authorityInfo.Host) {
+			discoveryHost = authorityInfo.Host
+		}
+
+		endpoint := fmt.Sprintf(instanceDiscoveryEndpoint, discoveryHost)
+		err = c.Comm.JSONCall(ctx, endpoint, http.Header{}, qv, nil, &resp)
+	}
+	return resp, err
+}
+
+// detectRegion returns the Azure region the process is running in, or "" if it
+// cannot be determined. The environment variable named by regionName takes
+// precedence (whitespace stripped, lowercased); otherwise the IMDS endpoint is
+// probed, retrying once on error or non-200 status.
+func detectRegion(ctx context.Context) string {
+	region := os.Getenv(regionName)
+	if region != "" {
+		region = strings.ReplaceAll(region, " ", "")
+		return strings.ToLower(region)
+	}
+	// HTTP call to IMDS endpoint to get region
+	// Refer : https://identitydivision.visualstudio.com/DevEx/_git/AuthLibrariesApiReview?path=%2FPinAuthToRegion%2FAAD%20SDK%20Proposal%20to%20Pin%20Auth%20to%20region.md&_a=preview&version=GBdev
+	// Set a 2 second timeout for this http client which only does calls to IMDS endpoint
+	client := http.Client{
+		Timeout: 2 * time.Second,
+	}
+	// Honor the caller's context (the original ignored ctx entirely) and the
+	// NewRequest error (the original discarded it, risking a nil req).
+	req, err := http.NewRequestWithContext(ctx, http.MethodGet, imdsEndpoint, nil)
+	if err != nil {
+		return ""
+	}
+	req.Header.Set("Metadata", "true")
+	resp, err := client.Do(req)
+	// If the request times out or there is an error, it is retried once
+	if err != nil || resp.StatusCode != http.StatusOK {
+		if err == nil {
+			// Close the failed response's body before retrying; the original
+			// leaked it, pinning the connection.
+			resp.Body.Close()
+		}
+		resp, err = client.Do(req)
+		if err != nil || resp.StatusCode != http.StatusOK {
+			if err == nil {
+				resp.Body.Close()
+			}
+			return ""
+		}
+	}
+	defer resp.Body.Close()
+	response, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return ""
+	}
+	return string(response)
+}
+
+// CacheKey returns the key under which tokens for these auth parameters are
+// cached: the assertion hash for on-behalf-of flows, the app key for client
+// credentials (or when the app cache is explicitly requested), the home
+// account ID for refresh-token/account lookups, and "" otherwise.
+func (a *AuthParams) CacheKey(isAppCache bool) string {
+	if a.AuthorizationType == ATOnBehalfOf {
+		return a.AssertionHash()
+	}
+	if a.AuthorizationType == ATClientCredentials || isAppCache {
+		return a.AppKey()
+	}
+	if a.AuthorizationType == ATRefreshToken || a.AuthorizationType == AccountByID {
+		return a.HomeAccountID
+	}
+	return ""
+}
+// AssertionHash returns the base64url-encoded SHA-256 of the user assertion,
+// used as the cache key for on-behalf-of flows.
+func (a *AuthParams) AssertionHash() string {
+	hasher := sha256.New()
+	// Per documentation this never returns an error : https://pkg.go.dev/hash#pkg-types
+	_, _ = hasher.Write([]byte(a.UserAssertion))
+	sha := base64.URLEncoding.EncodeToString(hasher.Sum(nil))
+	return sha
+}
+
+// AppKey returns the app-token-cache key, "<clientID>_<tenant>_AppTokenCache".
+// With no tenant the middle segment is empty, yielding a double underscore.
+func (a *AuthParams) AppKey() string {
+	if a.AuthorityInfo.Tenant != "" {
+		return fmt.Sprintf("%s_%s_AppTokenCache", a.ClientID, a.AuthorityInfo.Tenant)
+	}
+	return fmt.Sprintf("%s__AppTokenCache", a.ClientID)
+}
@@ -0,0 +1,30 @@
+// Code generated by "stringer -type=AuthorizeType"; DO NOT EDIT.
+// NOTE(review): generated file — change the AuthorizeType constants and rerun
+// stringer rather than editing by hand.
+
+package authority
+
+import "strconv"
+
+func _() {
+	// An "invalid array index" compiler error signifies that the constant values have changed.
+	// Re-run the stringer command to generate them again.
+	var x [1]struct{}
+	_ = x[ATUnknown-0]
+	_ = x[ATUsernamePassword-1]
+	_ = x[ATWindowsIntegrated-2]
+	_ = x[ATAuthCode-3]
+	_ = x[ATInteractive-4]
+	_ = x[ATClientCredentials-5]
+	_ = x[ATDeviceCode-6]
+	_ = x[ATRefreshToken-7]
+}
+
+const _AuthorizeType_name = "ATUnknownATUsernamePasswordATWindowsIntegratedATAuthCodeATInteractiveATClientCredentialsATDeviceCodeATRefreshToken"
+
+// _AuthorizeType_index holds the start offset of each name in _AuthorizeType_name.
+var _AuthorizeType_index = [...]uint8{0, 9, 27, 46, 56, 69, 88, 100, 114}
+
+func (i AuthorizeType) String() string {
+	if i < 0 || i >= AuthorizeType(len(_AuthorizeType_index)-1) {
+		return "AuthorizeType(" + strconv.FormatInt(int64(i), 10) + ")"
+	}
+	return _AuthorizeType_name[_AuthorizeType_index[i]:_AuthorizeType_index[i+1]]
+}
@@ -0,0 +1,320 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+// Package comm provides helpers for communicating with HTTP backends.
+package comm
+
+import (
+ "bytes"
+ "context"
+ "encoding/json"
+ "encoding/xml"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "reflect"
+ "runtime"
+ "strings"
+ "time"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/errors"
+ customJSON "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/json"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/version"
+ "github.com/google/uuid"
+)
+
+// HTTPClient represents an HTTP client.
+// It's usually an *http.Client from the standard library.
+type HTTPClient interface {
+	// Do sends an HTTP request and returns an HTTP response.
+	Do(req *http.Request) (*http.Response, error)
+
+	// CloseIdleConnections closes any idle connections in a "keep-alive" state.
+	CloseIdleConnections()
+}
+
+// Client provides a wrapper to our *http.Client that handles compression and serialization needs.
+type Client struct {
+	client HTTPClient
+}
+
+// New returns a new Client object.
+// Panics if httpClient is nil: a nil transport is a programmer error, not a
+// recoverable runtime condition.
+func New(httpClient HTTPClient) *Client {
+	if httpClient == nil {
+		panic("http.Client cannot == nil")
+	}
+
+	return &Client{client: httpClient}
+}
+
+// JSONCall connects to the REST endpoint passing the HTTP query values, headers and JSON conversion
+// of body in the HTTP body. It automatically handles compression and decompression with gzip. The response is JSON
+// unmarshalled into resp. resp must be a pointer to a struct. If the body struct contains a field called
+// "AdditionalFields" we use a custom marshal/unmarshal engine.
+// A nil body produces a GET request; a non-nil body is marshalled and sent as POST.
+func (c *Client) JSONCall(ctx context.Context, endpoint string, headers http.Header, qv url.Values, body, resp interface{}) error {
+	if qv == nil {
+		qv = url.Values{}
+	}
+
+	v := reflect.ValueOf(resp)
+	if err := c.checkResp(v); err != nil {
+		return err
+	}
+
+	// Choose a JSON marshal/unmarshal depending on if we have AdditionalFields attribute.
+	var marshal = json.Marshal
+	var unmarshal = json.Unmarshal
+	if _, ok := v.Elem().Type().FieldByName("AdditionalFields"); ok {
+		marshal = customJSON.Marshal
+		unmarshal = customJSON.Unmarshal
+	}
+
+	u, err := url.Parse(endpoint)
+	if err != nil {
+		return fmt.Errorf("could not parse path URL(%s): %w", endpoint, err)
+	}
+	u.RawQuery = qv.Encode()
+
+	addStdHeaders(headers)
+
+	req := &http.Request{Method: http.MethodGet, URL: u, Header: headers}
+
+	if body != nil {
+		// Note: In case you're wondering why we are not gzip encoding....
+		// I'm not sure if these various services support gzip on send.
+		headers.Add("Content-Type", "application/json; charset=utf-8")
+		data, err := marshal(body)
+		if err != nil {
+			return fmt.Errorf("bug: conn.Call(): could not marshal the body object: %w", err)
+		}
+		req.Body = io.NopCloser(bytes.NewBuffer(data))
+		req.Method = http.MethodPost
+	}
+
+	data, err := c.do(ctx, req)
+	if err != nil {
+		return err
+	}
+
+	if resp != nil {
+		if err := unmarshal(data, resp); err != nil {
+			return fmt.Errorf("json decode error: %w\njson message bytes were: %s", err, string(data))
+		}
+	}
+	return nil
+}
+
+// XMLCall connects to an endpoint and decodes the XML response into resp. This is used when
+// sending application/xml . If sending XML via SOAP, use SOAPCall().
+func (c *Client) XMLCall(ctx context.Context, endpoint string, headers http.Header, qv url.Values, resp interface{}) error {
+	if err := c.checkResp(reflect.ValueOf(resp)); err != nil {
+		return err
+	}
+
+	if qv == nil {
+		qv = url.Values{}
+	}
+
+	u, err := url.Parse(endpoint)
+	if err != nil {
+		return fmt.Errorf("could not parse path URL(%s): %w", endpoint, err)
+	}
+	u.RawQuery = qv.Encode()
+
+	headers.Set("Content-Type", "application/xml; charset=utf-8") // This was not set in the original Mex(), but...
+	addStdHeaders(headers)
+
+	// Empty body means xmlCall issues a GET.
+	return c.xmlCall(ctx, u, headers, "", resp)
+}
+
+// SOAPCall returns the SOAP message given an endpoint, action, body of the request and the response object to marshal into.
+// body must be non-empty; SOAP requires an envelope to POST.
+func (c *Client) SOAPCall(ctx context.Context, endpoint, action string, headers http.Header, qv url.Values, body string, resp interface{}) error {
+	if body == "" {
+		return fmt.Errorf("cannot make a SOAP call with body set to empty string")
+	}
+
+	if err := c.checkResp(reflect.ValueOf(resp)); err != nil {
+		return err
+	}
+
+	if qv == nil {
+		qv = url.Values{}
+	}
+
+	u, err := url.Parse(endpoint)
+	if err != nil {
+		return fmt.Errorf("could not parse path URL(%s): %w", endpoint, err)
+	}
+	u.RawQuery = qv.Encode()
+
+	headers.Set("Content-Type", "application/soap+xml; charset=utf-8")
+	headers.Set("SOAPAction", action)
+	addStdHeaders(headers)
+
+	return c.xmlCall(ctx, u, headers, body, resp)
+}
+
+// xmlCall sends an XML in body and decodes into resp. This simply does the transport and relies on
+// an upper level call to set things such as SOAP parameters and Content-Type, if required.
+// A non-empty body switches the request from GET to POST.
+func (c *Client) xmlCall(ctx context.Context, u *url.URL, headers http.Header, body string, resp interface{}) error {
+	req := &http.Request{Method: http.MethodGet, URL: u, Header: headers}
+
+	if len(body) > 0 {
+		req.Method = http.MethodPost
+		req.Body = io.NopCloser(strings.NewReader(body))
+	}
+
+	data, err := c.do(ctx, req)
+	if err != nil {
+		return err
+	}
+
+	return xml.Unmarshal(data, resp)
+}
+
+// URLFormCall is used to make a call where we need to send application/x-www-form-urlencoded data
+// to the backend and receive JSON back. qv will be encoded into the request body.
+// resp must be a pointer to a struct; if it has an AdditionalFields field the
+// custom JSON engine is used for decoding.
+func (c *Client) URLFormCall(ctx context.Context, endpoint string, qv url.Values, resp interface{}) error {
+	if len(qv) == 0 {
+		return fmt.Errorf("URLFormCall() requires qv to have non-zero length")
+	}
+
+	// Validate resp once up front. (The original re-ran reflect.ValueOf and
+	// checkResp a second time after the HTTP call, and re-checked resp != nil,
+	// all of which checkResp already guarantees — pure redundancy removed.)
+	v := reflect.ValueOf(resp)
+	if err := c.checkResp(v); err != nil {
+		return err
+	}
+
+	u, err := url.Parse(endpoint)
+	if err != nil {
+		return fmt.Errorf("could not parse path URL(%s): %w", endpoint, err)
+	}
+
+	headers := http.Header{}
+	headers.Set("Content-Type", "application/x-www-form-urlencoded; charset=utf-8")
+	addStdHeaders(headers)
+
+	enc := qv.Encode()
+
+	req := &http.Request{
+		Method:        http.MethodPost,
+		URL:           u,
+		Header:        headers,
+		ContentLength: int64(len(enc)),
+		Body:          io.NopCloser(strings.NewReader(enc)),
+		// GetBody lets the transport replay the body on redirects/retries.
+		GetBody: func() (io.ReadCloser, error) {
+			return io.NopCloser(strings.NewReader(enc)), nil
+		},
+	}
+
+	data, err := c.do(ctx, req)
+	if err != nil {
+		return err
+	}
+
+	// Use the custom JSON engine when the target struct carries AdditionalFields.
+	var unmarshal = json.Unmarshal
+	if _, ok := v.Elem().Type().FieldByName("AdditionalFields"); ok {
+		unmarshal = customJSON.Unmarshal
+	}
+	if err := unmarshal(data, resp); err != nil {
+		return fmt.Errorf("json decode error: %w\nraw message was: %s", err, string(data))
+	}
+	return nil
+}
+
+// do makes the HTTP call to the server and returns the contents of the body.
+// It enforces a 30-second deadline if ctx has none, and treats any status
+// other than 200/201 as an error, including the response body text in the
+// error when present.
+func (c *Client) do(ctx context.Context, req *http.Request) ([]byte, error) {
+	if _, ok := ctx.Deadline(); !ok {
+		var cancel context.CancelFunc
+		ctx, cancel = context.WithTimeout(ctx, 30*time.Second)
+		defer cancel()
+	}
+	req = req.WithContext(ctx)
+
+	reply, err := c.client.Do(req)
+	if err != nil {
+		return nil, fmt.Errorf("server response error:\n %w", err)
+	}
+	defer reply.Body.Close()
+
+	data, err := c.readBody(reply)
+	if err != nil {
+		return nil, fmt.Errorf("could not read the body of an HTTP Response: %w", err)
+	}
+	// Re-attach the (already-read, decompressed) body so downstream error
+	// handlers holding the *http.Response can still read it.
+	reply.Body = io.NopCloser(bytes.NewBuffer(data))
+
+	// NOTE: This doesn't happen immediately after the call so that we can get an error message
+	// from the server and include it in our error.
+	switch reply.StatusCode {
+	case 200, 201:
+	default:
+		sd := strings.TrimSpace(string(data))
+		if sd != "" {
+			// We probably have the error in the body.
+			return nil, errors.CallErr{
+				Req:  req,
+				Resp: reply,
+				Err:  fmt.Errorf("http call(%s)(%s) error: reply status code was %d:\n%s", req.URL.String(), req.Method, reply.StatusCode, sd),
+			}
+		}
+		return nil, errors.CallErr{
+			Req:  req,
+			Resp: reply,
+			Err:  fmt.Errorf("http call(%s)(%s) error: reply status code was %d", req.URL.String(), req.Method, reply.StatusCode),
+		}
+	}
+
+	return data, nil
+}
+
+// checkResp checks a response object to make sure it is a pointer to a struct.
+// Note: v.Elem() on a nil *T yields an invalid Value, so a typed nil pointer
+// is rejected here too.
+func (c *Client) checkResp(v reflect.Value) error {
+	if v.Kind() != reflect.Ptr {
+		// Fixed error-string grammar ("must a" -> "must be a").
+		return fmt.Errorf("bug: resp argument must be a *struct, was %T", v.Interface())
+	}
+	v = v.Elem()
+	if v.Kind() != reflect.Struct {
+		return fmt.Errorf("bug: resp argument must be a *struct, was %T", v.Interface())
+	}
+	return nil
+}
+
+// readBody reads the body out of an *http.Response. It supports gzip encoded responses.
+// Any other Content-Encoding is rejected as a bug.
+func (c *Client) readBody(resp *http.Response) ([]byte, error) {
+	var reader io.Reader = resp.Body
+	switch enc := resp.Header.Get("Content-Encoding"); enc {
+	case "":
+		// Do nothing
+	case "gzip":
+		reader = gzipDecompress(resp.Body)
+	default:
+		// Fixed error-string grammar ("was send" -> "was sent").
+		return nil, fmt.Errorf("bug: comm.Client.JSONCall(): content was sent with unsupported content-encoding %s", enc)
+	}
+	return io.ReadAll(reader)
+}
+
+// testID, when non-empty, pins a static client-request-id for tests instead
+// of a fresh UUID per call.
+var testID string
+
+// addStdHeaders adds the standard headers we use on all calls.
+// It mutates headers in place and also returns it for convenience.
+func addStdHeaders(headers http.Header) http.Header {
+	headers.Set("Accept-Encoding", "gzip")
+	// So that I can have a static id for tests.
+	// (The original if/else duplicated the Return-Client-Request-Id line in
+	// both branches; only the id value actually differs.)
+	id := testID
+	if id == "" {
+		id = uuid.New().String()
+	}
+	headers.Set("client-request-id", id)
+	headers.Set("Return-Client-Request-Id", "false")
+	headers.Set("x-client-sku", "MSAL.Go")
+	headers.Set("x-client-os", runtime.GOOS)
+	headers.Set("x-client-cpu", runtime.GOARCH)
+	headers.Set("x-client-ver", version.Version)
+	return headers
+}
@@ -0,0 +1,33 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package comm
+
+import (
+ "compress/gzip"
+ "io"
+)
+
+// gzipDecompress returns a reader that yields the decompressed contents of r.
+// Decompression runs in a goroutine feeding a pipe; errors (including an
+// invalid gzip header) surface to the consumer via the pipe.
+func gzipDecompress(r io.Reader) io.Reader {
+	pipeOut, pipeIn := io.Pipe()
+
+	gzipReader, err := gzip.NewReader(r)
+	if err != nil {
+		// The original discarded this error, leaving gzipReader nil and
+		// panicking in the goroutine below on malformed input. Report it to
+		// the reader side instead.
+		pipeIn.CloseWithError(err) //nolint
+		return pipeOut
+	}
+
+	go func() {
+		// decompression bomb would have to come from Azure services.
+		// If we want to limit, we should do that in comm.do().
+		_, err := io.Copy(pipeIn, gzipReader) //nolint
+		if err != nil {
+			// don't need the error.
+			pipeIn.CloseWithError(err) //nolint
+			gzipReader.Close()
+			return
+		}
+		if err := gzipReader.Close(); err != nil {
+			// don't need the error.
+			pipeIn.CloseWithError(err) //nolint
+			return
+		}
+		pipeIn.Close()
+	}()
+	return pipeOut
+}
@@ -0,0 +1,17 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+// Package grant holds types of grants issued by authorization services.
+package grant
+
+// These are the OAuth2 grant_type (and assertion-type URN) values sent to the
+// token endpoint.
+const (
+	Password         = "password"
+	JWT              = "urn:ietf:params:oauth:grant-type:jwt-bearer"
+	SAMLV1           = "urn:ietf:params:oauth:grant-type:saml1_1-bearer"
+	SAMLV2           = "urn:ietf:params:oauth:grant-type:saml2-bearer"
+	DeviceCode       = "device_code"
+	AuthCode         = "authorization_code"
+	RefreshToken     = "refresh_token"
+	ClientCredential = "client_credentials"
+	ClientAssertion  = "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"
+)
@@ -0,0 +1,56 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+/*
+Package ops provides operations to various backend services using REST clients.
+
+The REST type provides several clients that can be used to communicate to backends.
+Usage is simple:
+
+ rest := ops.New()
+
+ // Creates an authority client and calls the UserRealm() method.
+ userRealm, err := rest.Authority().UserRealm(ctx, authParameters)
+ if err != nil {
+ // Do something
+ }
+*/
+package ops
+
+import (
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/internal/comm"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust"
+)
+
+// HTTPClient represents an HTTP client.
+// It's usually an *http.Client from the standard library.
+type HTTPClient = comm.HTTPClient
+
+// REST provides REST clients for communicating with various backends used by MSAL.
+// All sub-clients share the single underlying comm.Client.
+type REST struct {
+	client *comm.Client
+}
+
+// New is the constructor for REST.
+func New(httpClient HTTPClient) *REST {
+	return &REST{client: comm.New(httpClient)}
+}
+
+// Authority returns a client for querying information about various authorities.
+func (r *REST) Authority() authority.Client {
+	return authority.Client{Comm: r.client}
+}
+
+// AccessTokens returns a client that can be used to get various access tokens for
+// authorization purposes.
+func (r *REST) AccessTokens() accesstokens.Client {
+	return accesstokens.Client{Comm: r.client}
+}
+
+// WSTrust provides access to various metadata in a WSTrust service. This data can
+// be used to gain tokens based on SAML data using the client provided by AccessTokens().
+func (r *REST) WSTrust() wstrust.Client {
+	return wstrust.Client{Comm: r.client}
+}
@@ -0,0 +1,25 @@
+// Code generated by "stringer -type=endpointType"; DO NOT EDIT.
+// NOTE(review): generated file — change the endpointType constants and rerun
+// stringer rather than editing by hand.
+
+package defs
+
+import "strconv"
+
+func _() {
+	// An "invalid array index" compiler error signifies that the constant values have changed.
+	// Re-run the stringer command to generate them again.
+	var x [1]struct{}
+	_ = x[etUnknown-0]
+	_ = x[etUsernamePassword-1]
+	_ = x[etWindowsTransport-2]
+}
+
+const _endpointType_name = "etUnknownetUsernamePasswordetWindowsTransport"
+
+// _endpointType_index holds the start offset of each name in _endpointType_name.
+var _endpointType_index = [...]uint8{0, 9, 27, 45}
+
+func (i endpointType) String() string {
+	if i < 0 || i >= endpointType(len(_endpointType_index)-1) {
+		return "endpointType(" + strconv.FormatInt(int64(i), 10) + ")"
+	}
+	return _endpointType_name[_endpointType_index[i]:_endpointType_index[i+1]]
+}
@@ -0,0 +1,394 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package defs
+
+import "encoding/xml"
+
+// Definitions maps the root <definitions> element of a WSDL (MEX) document.
+// The many string attrs capture namespace declarations verbatim.
+type Definitions struct {
+	XMLName         xml.Name   `xml:"definitions"`
+	Text            string     `xml:",chardata"`
+	Name            string     `xml:"name,attr"`
+	TargetNamespace string     `xml:"targetNamespace,attr"`
+	WSDL            string     `xml:"wsdl,attr"`
+	XSD             string     `xml:"xsd,attr"`
+	T               string     `xml:"t,attr"`
+	SOAPENC         string     `xml:"soapenc,attr"`
+	SOAP            string     `xml:"soap,attr"`
+	TNS             string     `xml:"tns,attr"`
+	MSC             string     `xml:"msc,attr"`
+	WSAM            string     `xml:"wsam,attr"`
+	SOAP12          string     `xml:"soap12,attr"`
+	WSA10           string     `xml:"wsa10,attr"`
+	WSA             string     `xml:"wsa,attr"`
+	WSAW            string     `xml:"wsaw,attr"`
+	WSX             string     `xml:"wsx,attr"`
+	WSAP            string     `xml:"wsap,attr"`
+	WSU             string     `xml:"wsu,attr"`
+	Trust           string     `xml:"trust,attr"`
+	WSP             string     `xml:"wsp,attr"`
+	Policy          []Policy   `xml:"Policy"`
+	Types           Types      `xml:"types"`
+	Message         []Message  `xml:"message"`
+	PortType        []PortType `xml:"portType"`
+	Binding         []Binding  `xml:"binding"`
+	Service         Service    `xml:"service"`
+}
+
+// Policy through AlgorithmSuitePolicy map the WS-Policy assertion tree found
+// in the WSDL. These are pure XML-decoding shapes; no behavior.
+type Policy struct {
+	Text       string     `xml:",chardata"`
+	ID         string     `xml:"Id,attr"`
+	ExactlyOne ExactlyOne `xml:"ExactlyOne"`
+}
+
+type ExactlyOne struct {
+	Text string `xml:",chardata"`
+	All  All    `xml:"All"`
+}
+
+// All groups the policy assertions that must all hold for an endpoint.
+type All struct {
+	Text                            string                          `xml:",chardata"`
+	NegotiateAuthentication         NegotiateAuthentication         `xml:"NegotiateAuthentication"`
+	TransportBinding                TransportBinding                `xml:"TransportBinding"`
+	UsingAddressing                 Text                            `xml:"UsingAddressing"`
+	EndorsingSupportingTokens       EndorsingSupportingTokens       `xml:"EndorsingSupportingTokens"`
+	WSS11                           WSS11                           `xml:"Wss11"`
+	Trust10                         Trust10                         `xml:"Trust10"`
+	SignedSupportingTokens          SignedSupportingTokens          `xml:"SignedSupportingTokens"`
+	Trust13                         WSTrust13                       `xml:"Trust13"`
+	SignedEncryptedSupportingTokens SignedEncryptedSupportingTokens `xml:"SignedEncryptedSupportingTokens"`
+}
+
+type NegotiateAuthentication struct {
+	Text    string `xml:",chardata"`
+	HTTP    string `xml:"http,attr"`
+	XMLName xml.Name
+}
+
+type TransportBinding struct {
+	Text   string                 `xml:",chardata"`
+	SP     string                 `xml:"sp,attr"`
+	Policy TransportBindingPolicy `xml:"Policy"`
+}
+
+type TransportBindingPolicy struct {
+	Text             string         `xml:",chardata"`
+	TransportToken   TransportToken `xml:"TransportToken"`
+	AlgorithmSuite   AlgorithmSuite `xml:"AlgorithmSuite"`
+	Layout           Layout         `xml:"Layout"`
+	IncludeTimestamp Text           `xml:"IncludeTimestamp"`
+}
+
+type TransportToken struct {
+	Text   string               `xml:",chardata"`
+	Policy TransportTokenPolicy `xml:"Policy"`
+}
+
+type TransportTokenPolicy struct {
+	Text       string     `xml:",chardata"`
+	HTTPSToken HTTPSToken `xml:"HttpsToken"`
+}
+
+type HTTPSToken struct {
+	Text                     string `xml:",chardata"`
+	RequireClientCertificate string `xml:"RequireClientCertificate,attr"`
+}
+
+type AlgorithmSuite struct {
+	Text   string               `xml:",chardata"`
+	Policy AlgorithmSuitePolicy `xml:"Policy"`
+}
+
+type AlgorithmSuitePolicy struct {
+	Text     string `xml:",chardata"`
+	Basic256 Text   `xml:"Basic256"`
+	Basic128 Text   `xml:"Basic128"`
+}
+
+// Layout through SignedEncryptedSupportingTokens continue the WS-Policy token
+// assertion mappings (X509, Kerberos, issued, username tokens, Trust 1.0/1.3).
+// Pure XML-decoding shapes; no behavior.
+type Layout struct {
+	Text   string       `xml:",chardata"`
+	Policy LayoutPolicy `xml:"Policy"`
+}
+
+type LayoutPolicy struct {
+	Text   string `xml:",chardata"`
+	Strict Text   `xml:"Strict"`
+}
+
+type EndorsingSupportingTokens struct {
+	Text   string                          `xml:",chardata"`
+	SP     string                          `xml:"sp,attr"`
+	Policy EndorsingSupportingTokensPolicy `xml:"Policy"`
+}
+
+type EndorsingSupportingTokensPolicy struct {
+	Text          string        `xml:",chardata"`
+	X509Token     X509Token     `xml:"X509Token"`
+	RSAToken      RSAToken      `xml:"RsaToken"`
+	SignedParts   SignedParts   `xml:"SignedParts"`
+	KerberosToken KerberosToken `xml:"KerberosToken"`
+	IssuedToken   IssuedToken   `xml:"IssuedToken"`
+	KeyValueToken KeyValueToken `xml:"KeyValueToken"`
+}
+
+type X509Token struct {
+	Text         string          `xml:",chardata"`
+	IncludeToken string          `xml:"IncludeToken,attr"`
+	Policy       X509TokenPolicy `xml:"Policy"`
+}
+
+type X509TokenPolicy struct {
+	Text                       string `xml:",chardata"`
+	RequireThumbprintReference Text   `xml:"RequireThumbprintReference"`
+	WSSX509V3Token10           Text   `xml:"WssX509V3Token10"`
+}
+
+type RSAToken struct {
+	Text         string `xml:",chardata"`
+	IncludeToken string `xml:"IncludeToken,attr"`
+	Optional     string `xml:"Optional,attr"`
+	MSSP         string `xml:"mssp,attr"`
+}
+
+type SignedParts struct {
+	Text   string            `xml:",chardata"`
+	Header SignedPartsHeader `xml:"Header"`
+}
+
+type SignedPartsHeader struct {
+	Text      string `xml:",chardata"`
+	Name      string `xml:"Name,attr"`
+	Namespace string `xml:"Namespace,attr"`
+}
+
+type KerberosToken struct {
+	Text         string              `xml:",chardata"`
+	IncludeToken string              `xml:"IncludeToken,attr"`
+	Policy       KerberosTokenPolicy `xml:"Policy"`
+}
+
+type KerberosTokenPolicy struct {
+	Text                         string `xml:",chardata"`
+	WSSGSSKerberosV5ApReqToken11 Text   `xml:"WssGssKerberosV5ApReqToken11"`
+}
+
+type IssuedToken struct {
+	Text                         string                       `xml:",chardata"`
+	IncludeToken                 string                       `xml:"IncludeToken,attr"`
+	RequestSecurityTokenTemplate RequestSecurityTokenTemplate `xml:"RequestSecurityTokenTemplate"`
+	Policy                       IssuedTokenPolicy            `xml:"Policy"`
+}
+
+type RequestSecurityTokenTemplate struct {
+	Text                      string `xml:",chardata"`
+	KeyType                   Text   `xml:"KeyType"`
+	EncryptWith               Text   `xml:"EncryptWith"`
+	SignatureAlgorithm        Text   `xml:"SignatureAlgorithm"`
+	CanonicalizationAlgorithm Text   `xml:"CanonicalizationAlgorithm"`
+	EncryptionAlgorithm       Text   `xml:"EncryptionAlgorithm"`
+	KeySize                   Text   `xml:"KeySize"`
+	KeyWrapAlgorithm          Text   `xml:"KeyWrapAlgorithm"`
+}
+
+type IssuedTokenPolicy struct {
+	Text                     string `xml:",chardata"`
+	RequireInternalReference Text   `xml:"RequireInternalReference"`
+}
+
+type KeyValueToken struct {
+	Text         string `xml:",chardata"`
+	IncludeToken string `xml:"IncludeToken,attr"`
+	Optional     string `xml:"Optional,attr"`
+}
+
+type WSS11 struct {
+	Text   string      `xml:",chardata"`
+	SP     string      `xml:"sp,attr"`
+	Policy Wss11Policy `xml:"Policy"`
+}
+
+type Wss11Policy struct {
+	Text                     string `xml:",chardata"`
+	MustSupportRefThumbprint Text   `xml:"MustSupportRefThumbprint"`
+}
+
+type Trust10 struct {
+	Text   string        `xml:",chardata"`
+	SP     string        `xml:"sp,attr"`
+	Policy Trust10Policy `xml:"Policy"`
+}
+
+type Trust10Policy struct {
+	Text                    string `xml:",chardata"`
+	MustSupportIssuedTokens Text   `xml:"MustSupportIssuedTokens"`
+	RequireClientEntropy    Text   `xml:"RequireClientEntropy"`
+	RequireServerEntropy    Text   `xml:"RequireServerEntropy"`
+}
+
+type SignedSupportingTokens struct {
+	Text   string                 `xml:",chardata"`
+	SP     string                 `xml:"sp,attr"`
+	Policy SupportingTokensPolicy `xml:"Policy"`
+}
+
+type SupportingTokensPolicy struct {
+	Text          string        `xml:",chardata"`
+	UsernameToken UsernameToken `xml:"UsernameToken"`
+}
+type UsernameToken struct {
+	Text         string              `xml:",chardata"`
+	IncludeToken string              `xml:"IncludeToken,attr"`
+	Policy       UsernameTokenPolicy `xml:"Policy"`
+}
+
+type UsernameTokenPolicy struct {
+	Text               string             `xml:",chardata"`
+	WSSUsernameToken10 WSSUsernameToken10 `xml:"WssUsernameToken10"`
+}
+
+type WSSUsernameToken10 struct {
+	Text    string `xml:",chardata"`
+	XMLName xml.Name
+}
+
+type WSTrust13 struct {
+	Text   string          `xml:",chardata"`
+	SP     string          `xml:"sp,attr"`
+	Policy WSTrust13Policy `xml:"Policy"`
+}
+
+type WSTrust13Policy struct {
+	Text                    string `xml:",chardata"`
+	MustSupportIssuedTokens Text   `xml:"MustSupportIssuedTokens"`
+	RequireClientEntropy    Text   `xml:"RequireClientEntropy"`
+	RequireServerEntropy    Text   `xml:"RequireServerEntropy"`
+}
+
+type SignedEncryptedSupportingTokens struct {
+	Text   string                 `xml:",chardata"`
+	SP     string                 `xml:"sp,attr"`
+	Policy SupportingTokensPolicy `xml:"Policy"`
+}
+
+// Types through Identity map the remaining WSDL sections: embedded schema
+// imports, abstract messages/portTypes, concrete bindings, and the service's
+// ports with their endpoint references. Pure XML-decoding shapes; no behavior.
+type Types struct {
+	Text   string `xml:",chardata"`
+	Schema Schema `xml:"schema"`
+}
+
+type Schema struct {
+	Text            string   `xml:",chardata"`
+	TargetNamespace string   `xml:"targetNamespace,attr"`
+	Import          []Import `xml:"import"`
+}
+
+type Import struct {
+	Text           string `xml:",chardata"`
+	SchemaLocation string `xml:"schemaLocation,attr"`
+	Namespace      string `xml:"namespace,attr"`
+}
+
+type Message struct {
+	Text string `xml:",chardata"`
+	Name string `xml:"name,attr"`
+	Part Part   `xml:"part"`
+}
+
+type Part struct {
+	Text    string `xml:",chardata"`
+	Name    string `xml:"name,attr"`
+	Element string `xml:"element,attr"`
+}
+
+type PortType struct {
+	Text      string    `xml:",chardata"`
+	Name      string    `xml:"name,attr"`
+	Operation Operation `xml:"operation"`
+}
+
+type Operation struct {
+	Text   string      `xml:",chardata"`
+	Name   string      `xml:"name,attr"`
+	Input  OperationIO `xml:"input"`
+	Output OperationIO `xml:"output"`
+}
+
+type OperationIO struct {
+	Text    string          `xml:",chardata"`
+	Action  string          `xml:"Action,attr"`
+	Message string          `xml:"message,attr"`
+	Body    OperationIOBody `xml:"body"`
+}
+
+type OperationIOBody struct {
+	Text string `xml:",chardata"`
+	Use  string `xml:"use,attr"`
+}
+
+type Binding struct {
+	Text            string             `xml:",chardata"`
+	Name            string             `xml:"name,attr"`
+	Type            string             `xml:"type,attr"`
+	PolicyReference PolicyReference    `xml:"PolicyReference"`
+	Binding         DefinitionsBinding `xml:"binding"`
+	Operation       BindingOperation   `xml:"operation"`
+}
+
+type PolicyReference struct {
+	Text string `xml:",chardata"`
+	URI  string `xml:"URI,attr"`
+}
+
+type DefinitionsBinding struct {
+	Text      string `xml:",chardata"`
+	Transport string `xml:"transport,attr"`
+}
+
+type BindingOperation struct {
+	Text      string                    `xml:",chardata"`
+	Name      string                    `xml:"name,attr"`
+	Operation BindingOperationOperation `xml:"operation"`
+	Input     BindingOperationIO        `xml:"input"`
+	Output    BindingOperationIO        `xml:"output"`
+}
+
+type BindingOperationOperation struct {
+	Text       string `xml:",chardata"`
+	SoapAction string `xml:"soapAction,attr"`
+	Style      string `xml:"style,attr"`
+}
+
+type BindingOperationIO struct {
+	Text string          `xml:",chardata"`
+	Body OperationIOBody `xml:"body"`
+}
+
+type Service struct {
+	Text string `xml:",chardata"`
+	Name string `xml:"name,attr"`
+	Port []Port `xml:"port"`
+}
+
+type Port struct {
+	Text              string                `xml:",chardata"`
+	Name              string                `xml:"name,attr"`
+	Binding           string                `xml:"binding,attr"`
+	Address           Address               `xml:"address"`
+	EndpointReference PortEndpointReference `xml:"EndpointReference"`
+}
+
+type Address struct {
+	Text     string `xml:",chardata"`
+	Location string `xml:"location,attr"`
+}
+
+type PortEndpointReference struct {
+	Text     string   `xml:",chardata"`
+	Address  Text     `xml:"Address"`
+	Identity Identity `xml:"Identity"`
+}
+
+type Identity struct {
+	Text  string `xml:",chardata"`
+	XMLNS string `xml:"xmlns,attr"`
+	SPN   Text   `xml:"Spn"`
+}
@@ -0,0 +1,230 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package defs
+
+import "encoding/xml"
+
// TODO(msal): Document these attributes, or at minimum link the relevant
// WS-Trust/SAML specification sections they map to.
+
// SAMLDefinitions maps the SOAP envelope of a WS-Trust
// RequestSecurityTokenResponse message that carries SAML assertions.
type SAMLDefinitions struct {
	XMLName xml.Name `xml:"Envelope"`
	Text    string   `xml:",chardata"`
	S       string   `xml:"s,attr"`
	A       string   `xml:"a,attr"`
	U       string   `xml:"u,attr"`
	Header  Header   `xml:"Header"`
	Body    Body     `xml:"Body"`
}

// Header is the SOAP <Header> with its WS-Security block.
type Header struct {
	Text     string   `xml:",chardata"`
	Action   Action   `xml:"Action"`
	Security Security `xml:"Security"`
}

// Action is the WS-Addressing <Action> element.
type Action struct {
	Text           string `xml:",chardata"`
	MustUnderstand string `xml:"mustUnderstand,attr"`
}

// Security is the WS-Security header block.
type Security struct {
	Text           string    `xml:",chardata"`
	MustUnderstand string    `xml:"mustUnderstand,attr"`
	O              string    `xml:"o,attr"`
	Timestamp      Timestamp `xml:"Timestamp"`
}

// Timestamp bounds the validity window of the security header.
type Timestamp struct {
	Text    string `xml:",chardata"`
	ID      string `xml:"Id,attr"`
	Created Text   `xml:"Created"`
	Expires Text   `xml:"Expires"`
}

// Text is a leaf element whose only content is character data.
type Text struct {
	Text string `xml:",chardata"`
}

// Body is the SOAP <Body> wrapping the token response collection.
type Body struct {
	Text                                   string                                 `xml:",chardata"`
	RequestSecurityTokenResponseCollection RequestSecurityTokenResponseCollection `xml:"RequestSecurityTokenResponseCollection"`
}

// RequestSecurityTokenResponseCollection holds one or more token responses.
type RequestSecurityTokenResponseCollection struct {
	Text                         string                         `xml:",chardata"`
	Trust                        string                         `xml:"trust,attr"`
	RequestSecurityTokenResponse []RequestSecurityTokenResponse `xml:"RequestSecurityTokenResponse"`
}

// RequestSecurityTokenResponse is a single issued-token response: the token
// itself plus its lifetime, audience, and token references.
type RequestSecurityTokenResponse struct {
	Text                         string                       `xml:",chardata"`
	Lifetime                     Lifetime                     `xml:"Lifetime"`
	AppliesTo                    AppliesTo                    `xml:"AppliesTo"`
	RequestedSecurityToken       RequestedSecurityToken       `xml:"RequestedSecurityToken"`
	RequestedAttachedReference   RequestedAttachedReference   `xml:"RequestedAttachedReference"`
	RequestedUnattachedReference RequestedUnattachedReference `xml:"RequestedUnattachedReference"`
	TokenType                    Text                         `xml:"TokenType"`
	RequestType                  Text                         `xml:"RequestType"`
	KeyType                      Text                         `xml:"KeyType"`
}

// Lifetime is the token validity window.
type Lifetime struct {
	Text    string       `xml:",chardata"`
	Created WSUTimestamp `xml:"Created"`
	Expires WSUTimestamp `xml:"Expires"`
}

// WSUTimestamp is a wsu-namespaced timestamp value.
type WSUTimestamp struct {
	Text string `xml:",chardata"`
	Wsu  string `xml:"wsu,attr"`
}

// AppliesTo names the relying party the token was issued for.
type AppliesTo struct {
	Text              string            `xml:",chardata"`
	Wsp               string            `xml:"wsp,attr"`
	EndpointReference EndpointReference `xml:"EndpointReference"`
}

// EndpointReference is a WS-Addressing endpoint reference.
type EndpointReference struct {
	Text    string `xml:",chardata"`
	Wsa     string `xml:"wsa,attr"`
	Address Text   `xml:"Address"`
}

// RequestedSecurityToken contains the issued assertion. AssertionRawXML keeps
// the verbatim inner XML alongside the parsed Assertion (the ",innerxml" tag
// preserves the bytes exactly as received).
type RequestedSecurityToken struct {
	Text            string    `xml:",chardata"`
	AssertionRawXML string    `xml:",innerxml"`
	Assertion       Assertion `xml:"Assertion"`
}

// Assertion is the SAML assertion itself; the Saml attribute carries the
// namespace used to tell v1 and v2 assertions apart.
type Assertion struct {
	XMLName xml.Name // Normally its `xml:"Assertion"`, but I think they want to capture the xmlns
	Text                    string                  `xml:",chardata"`
	MajorVersion            string                  `xml:"MajorVersion,attr"`
	MinorVersion            string                  `xml:"MinorVersion,attr"`
	AssertionID             string                  `xml:"AssertionID,attr"`
	Issuer                  string                  `xml:"Issuer,attr"`
	IssueInstant            string                  `xml:"IssueInstant,attr"`
	Saml                    string                  `xml:"saml,attr"`
	Conditions              Conditions              `xml:"Conditions"`
	AttributeStatement      AttributeStatement      `xml:"AttributeStatement"`
	AuthenticationStatement AuthenticationStatement `xml:"AuthenticationStatement"`
	Signature               Signature               `xml:"Signature"`
}

// Conditions restricts when (and for which audiences) the assertion is valid.
type Conditions struct {
	Text                         string                       `xml:",chardata"`
	NotBefore                    string                       `xml:"NotBefore,attr"`
	NotOnOrAfter                 string                       `xml:"NotOnOrAfter,attr"`
	AudienceRestrictionCondition AudienceRestrictionCondition `xml:"AudienceRestrictionCondition"`
}

// AudienceRestrictionCondition limits the assertion to a specific audience.
type AudienceRestrictionCondition struct {
	Text     string `xml:",chardata"`
	Audience Text   `xml:"Audience"`
}

// AttributeStatement lists the subject's attributes.
type AttributeStatement struct {
	Text      string      `xml:",chardata"`
	Subject   Subject     `xml:"Subject"`
	Attribute []Attribute `xml:"Attribute"`
}

// Subject identifies whom the assertion is about.
type Subject struct {
	Text                string              `xml:",chardata"`
	NameIdentifier      NameIdentifier      `xml:"NameIdentifier"`
	SubjectConfirmation SubjectConfirmation `xml:"SubjectConfirmation"`
}

// NameIdentifier holds the subject's name and its format URI.
type NameIdentifier struct {
	Text   string `xml:",chardata"`
	Format string `xml:"Format,attr"`
}

// SubjectConfirmation describes how the subject may be confirmed.
type SubjectConfirmation struct {
	Text               string `xml:",chardata"`
	ConfirmationMethod Text   `xml:"ConfirmationMethod"`
}

// Attribute is a single name/value pair about the subject.
type Attribute struct {
	Text               string `xml:",chardata"`
	AttributeName      string `xml:"AttributeName,attr"`
	AttributeNamespace string `xml:"AttributeNamespace,attr"`
	AttributeValue     Text   `xml:"AttributeValue"`
}

// AuthenticationStatement records how and when the subject authenticated.
type AuthenticationStatement struct {
	Text                  string  `xml:",chardata"`
	AuthenticationMethod  string  `xml:"AuthenticationMethod,attr"`
	AuthenticationInstant string  `xml:"AuthenticationInstant,attr"`
	Subject               Subject `xml:"Subject"`
}

// Signature is the XML-DSig signature over the assertion.
type Signature struct {
	Text           string     `xml:",chardata"`
	Ds             string     `xml:"ds,attr"`
	SignedInfo     SignedInfo `xml:"SignedInfo"`
	SignatureValue Text       `xml:"SignatureValue"`
	KeyInfo        KeyInfo    `xml:"KeyInfo"`
}

// SignedInfo describes what was signed and with which algorithms.
type SignedInfo struct {
	Text                   string    `xml:",chardata"`
	CanonicalizationMethod Method    `xml:"CanonicalizationMethod"`
	SignatureMethod        Method    `xml:"SignatureMethod"`
	Reference              Reference `xml:"Reference"`
}

// Method is an element whose Algorithm attribute names a crypto algorithm.
type Method struct {
	Text      string `xml:",chardata"`
	Algorithm string `xml:"Algorithm,attr"`
}

// Reference points at the signed content and carries its digest.
type Reference struct {
	Text         string     `xml:",chardata"`
	URI          string     `xml:"URI,attr"`
	Transforms   Transforms `xml:"Transforms"`
	DigestMethod Method     `xml:"DigestMethod"`
	DigestValue  Text       `xml:"DigestValue"`
}

// Transforms lists the canonicalization/transform steps applied before digesting.
type Transforms struct {
	Text      string   `xml:",chardata"`
	Transform []Method `xml:"Transform"`
}

// KeyInfo carries the signing key material (an X.509 certificate here).
type KeyInfo struct {
	Text     string   `xml:",chardata"`
	Xmlns    string   `xml:"xmlns,attr"`
	X509Data X509Data `xml:"X509Data"`
}

// X509Data wraps the base64-encoded signing certificate.
type X509Data struct {
	Text            string `xml:",chardata"`
	X509Certificate Text   `xml:"X509Certificate"`
}

// RequestedAttachedReference references the token from within the message.
type RequestedAttachedReference struct {
	Text                   string                 `xml:",chardata"`
	SecurityTokenReference SecurityTokenReference `xml:"SecurityTokenReference"`
}

// SecurityTokenReference identifies a security token by type and key identifier.
type SecurityTokenReference struct {
	Text          string        `xml:",chardata"`
	TokenType     string        `xml:"TokenType,attr"`
	O             string        `xml:"o,attr"`
	K             string        `xml:"k,attr"`
	KeyIdentifier KeyIdentifier `xml:"KeyIdentifier"`
}

// KeyIdentifier names a key by a typed identifier value.
type KeyIdentifier struct {
	Text      string `xml:",chardata"`
	ValueType string `xml:"ValueType,attr"`
}

// RequestedUnattachedReference references the token from outside the message.
type RequestedUnattachedReference struct {
	Text                   string                 `xml:",chardata"`
	SecurityTokenReference SecurityTokenReference `xml:"SecurityTokenReference"`
}
@@ -0,0 +1,25 @@
+// Code generated by "stringer -type=Version"; DO NOT EDIT.
+
+package defs
+
+import "strconv"
+
// _ is a compile-time guard: it fails to build ("invalid array index") if the
// Version constant values drift from what this generated code expects.
func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[TrustUnknown-0]
	_ = x[Trust2005-1]
	_ = x[Trust13-2]
}

// _Version_name concatenates all Version names; _Version_index holds each
// name's start offset within it.
const _Version_name = "TrustUnknownTrust2005Trust13"

var _Version_index = [...]uint8{0, 12, 21, 28}

// String returns the name of the Version constant, or "Version(n)" for an
// out-of-range value. (Generated by stringer; regenerate rather than edit.)
func (i Version) String() string {
	if i < 0 || i >= Version(len(_Version_index)-1) {
		return "Version(" + strconv.FormatInt(int64(i), 10) + ")"
	}
	return _Version_name[_Version_index[i]:_Version_index[i+1]]
}
@@ -0,0 +1,199 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package defs
+
+import (
+ "encoding/xml"
+ "fmt"
+ "time"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ uuid "github.com/google/uuid"
+)
+
//go:generate stringer -type=Version

// Version identifies which WS-Trust protocol revision an endpoint speaks.
type Version int

const (
	// TrustUnknown is the zero value; no trust version has been determined.
	TrustUnknown Version = iota
	// Trust2005 is WS-Trust February 2005 (schemas.xmlsoap.org namespaces).
	Trust2005
	// Trust13 is WS-Trust 1.3 (docs.oasis-open.org namespaces).
	Trust13
)

// Endpoint represents a WSTrust endpoint.
type Endpoint struct {
	// Version is the version of the endpoint.
	Version Version
	// URL is the URL of the endpoint.
	URL string
}
+
// wsTrustTokenRequestEnvelope is the outbound SOAP envelope for a WS-Trust
// RequestSecurityToken (RST) call. It is filled field-by-field in
// buildTokenRequestMessage and marshaled with encoding/xml; the anonymous
// nested structs with explicit prefixes ("s:", "wsa:", "wsu:", "wsse:",
// "wst:", "wsp:") exist solely to emit the exact prefixed element names the
// server expects.
type wsTrustTokenRequestEnvelope struct {
	XMLName xml.Name `xml:"s:Envelope"`
	Text    string   `xml:",chardata"`
	S       string   `xml:"xmlns:s,attr"`
	Wsa     string   `xml:"xmlns:wsa,attr"`
	Wsu     string   `xml:"xmlns:wsu,attr"`
	Header  struct {
		Text   string `xml:",chardata"`
		Action struct {
			Text           string `xml:",chardata"`
			MustUnderstand string `xml:"s:mustUnderstand,attr"`
		} `xml:"wsa:Action"`
		MessageID struct {
			Text string `xml:",chardata"`
		} `xml:"wsa:messageID"`
		ReplyTo struct {
			Text    string `xml:",chardata"`
			Address struct {
				Text string `xml:",chardata"`
			} `xml:"wsa:Address"`
		} `xml:"wsa:ReplyTo"`
		To struct {
			Text           string `xml:",chardata"`
			MustUnderstand string `xml:"s:mustUnderstand,attr"`
		} `xml:"wsa:To"`
		// Security is the WS-Security header; it is only populated for
		// username/password requests (see buildTokenRequestMessage).
		Security struct {
			Text           string `xml:",chardata"`
			MustUnderstand string `xml:"s:mustUnderstand,attr"`
			Wsse           string `xml:"xmlns:wsse,attr"`
			Timestamp      struct {
				Text    string `xml:",chardata"`
				ID      string `xml:"wsu:Id,attr"`
				Created struct {
					Text string `xml:",chardata"`
				} `xml:"wsu:Created"`
				Expires struct {
					Text string `xml:",chardata"`
				} `xml:"wsu:Expires"`
			} `xml:"wsu:Timestamp"`
			UsernameToken struct {
				Text     string `xml:",chardata"`
				ID       string `xml:"wsu:Id,attr"`
				Username struct {
					Text string `xml:",chardata"`
				} `xml:"wsse:Username"`
				Password struct {
					Text string `xml:",chardata"`
				} `xml:"wsse:Password"`
			} `xml:"wsse:UsernameToken"`
		} `xml:"wsse:Security"`
	} `xml:"s:Header"`
	Body struct {
		Text                 string `xml:",chardata"`
		RequestSecurityToken struct {
			Text      string `xml:",chardata"`
			Wst       string `xml:"xmlns:wst,attr"`
			AppliesTo struct {
				Text              string `xml:",chardata"`
				Wsp               string `xml:"xmlns:wsp,attr"`
				EndpointReference struct {
					Text    string `xml:",chardata"`
					Address struct {
						Text string `xml:",chardata"`
					} `xml:"wsa:Address"`
				} `xml:"wsa:EndpointReference"`
			} `xml:"wsp:AppliesTo"`
			KeyType struct {
				Text string `xml:",chardata"`
			} `xml:"wst:KeyType"`
			RequestType struct {
				Text string `xml:",chardata"`
			} `xml:"wst:RequestType"`
		} `xml:"wst:RequestSecurityToken"`
	} `xml:"s:Body"`
}
+
+func buildTimeString(t time.Time) string {
+ // Golang time formats are weird: https://stackoverflow.com/questions/20234104/how-to-format-current-time-using-a-yyyymmddhhmmss-format
+ return t.Format("2006-01-02T15:04:05.000Z")
+}
+
// buildTokenRequestMessage constructs and marshals the WS-Trust RST envelope
// for this endpoint.
//
// The SOAPAction, trust namespace, key type and request type are selected by
// wte.Version (Trust2005 or Trust13); any other version is an error. For
// username/password auth a WS-Security header is added containing a
// timestamp (valid for 10 minutes from now) and a UsernameToken with the
// credentials; for other auth types (e.g. Windows integrated) no security
// header is emitted. The return value is the XML text of the envelope.
func (wte *Endpoint) buildTokenRequestMessage(authType authority.AuthorizeType, cloudAudienceURN string, username string, password string) (string, error) {
	var soapAction string
	var trustNamespace string
	var keyType string
	var requestType string

	createdTime := time.Now().UTC()
	expiresTime := createdTime.Add(10 * time.Minute)

	switch wte.Version {
	case Trust2005:
		soapAction = trust2005Spec
		trustNamespace = "http://schemas.xmlsoap.org/ws/2005/02/trust"
		keyType = "http://schemas.xmlsoap.org/ws/2005/05/identity/NoProofKey"
		requestType = "http://schemas.xmlsoap.org/ws/2005/02/trust/Issue"
	case Trust13:
		soapAction = trust13Spec
		trustNamespace = "http://docs.oasis-open.org/ws-sx/ws-trust/200512"
		keyType = "http://docs.oasis-open.org/ws-sx/ws-trust/200512/Bearer"
		requestType = "http://docs.oasis-open.org/ws-sx/ws-trust/200512/Issue"
	default:
		return "", fmt.Errorf("buildTokenRequestMessage had Version == %q, which is not recognized", wte.Version)
	}

	var envelope wsTrustTokenRequestEnvelope

	// Each message gets a fresh WS-Addressing MessageID.
	messageUUID := uuid.New()

	envelope.S = "http://www.w3.org/2003/05/soap-envelope"
	envelope.Wsa = "http://www.w3.org/2005/08/addressing"
	envelope.Wsu = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd"

	envelope.Header.Action.MustUnderstand = "1"
	envelope.Header.Action.Text = soapAction
	envelope.Header.MessageID.Text = "urn:uuid:" + messageUUID.String()
	envelope.Header.ReplyTo.Address.Text = "http://www.w3.org/2005/08/addressing/anonymous"
	envelope.Header.To.MustUnderstand = "1"
	envelope.Header.To.Text = wte.URL

	switch authType {
	case authority.ATUnknown:
		return "", fmt.Errorf("buildTokenRequestMessage had no authority type(%v)", authType)
	case authority.ATUsernamePassword:
		endpointUUID := uuid.New()

		var trustID string
		if wte.Version == Trust2005 {
			trustID = "UnPwSecTok2005-" + endpointUUID.String()
		} else {
			trustID = "UnPwSecTok13-" + endpointUUID.String()
		}

		// WS-Security header: a bounded timestamp plus the user's
		// credentials as a UsernameToken.
		envelope.Header.Security.MustUnderstand = "1"
		envelope.Header.Security.Wsse = "http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd"
		envelope.Header.Security.Timestamp.ID = "MSATimeStamp"
		envelope.Header.Security.Timestamp.Created.Text = buildTimeString(createdTime)
		envelope.Header.Security.Timestamp.Expires.Text = buildTimeString(expiresTime)
		envelope.Header.Security.UsernameToken.ID = trustID
		envelope.Header.Security.UsernameToken.Username.Text = username
		envelope.Header.Security.UsernameToken.Password.Text = password
	default:
		// This is just to note that we don't do anything for other cases.
		// We aren't missing anything I know of.
	}

	envelope.Body.RequestSecurityToken.Wst = trustNamespace
	envelope.Body.RequestSecurityToken.AppliesTo.Wsp = "http://schemas.xmlsoap.org/ws/2004/09/policy"
	envelope.Body.RequestSecurityToken.AppliesTo.EndpointReference.Address.Text = cloudAudienceURN
	envelope.Body.RequestSecurityToken.KeyType.Text = keyType
	envelope.Body.RequestSecurityToken.RequestType.Text = requestType

	output, err := xml.Marshal(envelope)
	if err != nil {
		return "", err
	}

	return string(output), nil
}
+
// BuildTokenRequestMessageWIA builds a WS-Trust RST envelope for Windows
// Integrated Auth; no username/password security header is included.
func (wte *Endpoint) BuildTokenRequestMessageWIA(cloudAudienceURN string) (string, error) {
	return wte.buildTokenRequestMessage(authority.ATWindowsIntegrated, cloudAudienceURN, "", "")
}

// BuildTokenRequestMessageUsernamePassword builds a WS-Trust RST envelope
// carrying the given credentials in a WS-Security UsernameToken header.
func (wte *Endpoint) BuildTokenRequestMessageUsernamePassword(cloudAudienceURN string, username string, password string) (string, error) {
	return wte.buildTokenRequestMessage(authority.ATUsernamePassword, cloudAudienceURN, username, password)
}
@@ -0,0 +1,159 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package defs
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+)
+
//go:generate stringer -type=endpointType

// endpointType classifies how a MEX endpoint authenticates callers.
type endpointType int

const (
	// etUnknown is the zero value; the endpoint type was not recognized.
	etUnknown endpointType = iota
	// etUsernamePassword endpoints accept a WS-Security username token.
	etUsernamePassword
	// etWindowsTransport endpoints use negotiate (Windows transport) auth.
	etWindowsTransport
)

// wsEndpointData pairs a binding's trust protocol version with its endpoint type.
type wsEndpointData struct {
	Version      Version
	EndpointType endpointType
}

// soapAction values identifying which WS-Trust revision a binding speaks.
const trust13Spec string = "http://docs.oasis-open.org/ws-sx/ws-trust/200512/RST/Issue"
const trust2005Spec string = "http://schemas.xmlsoap.org/ws/2005/02/trust/RST/Issue"

// MexDocument is the distilled result of parsing a federation metadata (MEX)
// document: the resolved WS-Trust endpoints plus the intermediate
// policy/binding tables they were derived from.
type MexDocument struct {
	UsernamePasswordEndpoint Endpoint
	WindowsTransportEndpoint Endpoint
	policies                 map[string]endpointType
	bindings                 map[string]wsEndpointData
}
+
+func updateEndpoint(cached *Endpoint, found Endpoint) {
+ if cached == nil || cached.Version == TrustUnknown {
+ *cached = found
+ return
+ }
+ if (*cached).Version == Trust2005 && found.Version == Trust13 {
+ *cached = found
+ return
+ }
+}
+
// TODO(msal): Someone needs to write tests for everything below.

// NewFromDef creates a new MexDocument.
//
// It distills a parsed WSDL Definitions into the WS-Trust endpoints in three
// passes: policies (which auth styles the document advertises), bindings
// (which trust protocol each policy's binding speaks), and endpoints (the
// concrete URLs for username/password and Windows-transport auth).
func NewFromDef(defs Definitions) (MexDocument, error) {
	policies, err := policies(defs)
	if err != nil {
		return MexDocument{}, err
	}

	bindings, err := bindings(defs, policies)
	if err != nil {
		return MexDocument{}, err
	}

	userPass, windows, err := endpoints(defs, bindings)
	if err != nil {
		return MexDocument{}, err
	}

	return MexDocument{
		UsernamePasswordEndpoint: userPass,
		WindowsTransportEndpoint: windows,
		policies:                 policies,
		bindings:                 bindings,
	}, nil
}
+
+func policies(defs Definitions) (map[string]endpointType, error) {
+ policies := make(map[string]endpointType, len(defs.Policy))
+
+ for _, policy := range defs.Policy {
+ if policy.ExactlyOne.All.NegotiateAuthentication.XMLName.Local != "" {
+ if policy.ExactlyOne.All.TransportBinding.SP != "" && policy.ID != "" {
+ policies["#"+policy.ID] = etWindowsTransport
+ }
+ }
+
+ if policy.ExactlyOne.All.SignedEncryptedSupportingTokens.Policy.UsernameToken.Policy.WSSUsernameToken10.XMLName.Local != "" {
+ if policy.ExactlyOne.All.TransportBinding.SP != "" && policy.ID != "" {
+ policies["#"+policy.ID] = etUsernamePassword
+ }
+ }
+ if policy.ExactlyOne.All.SignedSupportingTokens.Policy.UsernameToken.Policy.WSSUsernameToken10.XMLName.Local != "" {
+ if policy.ExactlyOne.All.TransportBinding.SP != "" && policy.ID != "" {
+ policies["#"+policy.ID] = etUsernamePassword
+ }
+ }
+ }
+
+ if len(policies) == 0 {
+ return policies, errors.New("no policies for mex document")
+ }
+
+ return policies, nil
+}
+
+func bindings(defs Definitions, policies map[string]endpointType) (map[string]wsEndpointData, error) {
+ bindings := make(map[string]wsEndpointData, len(defs.Binding))
+
+ for _, binding := range defs.Binding {
+ policyName := binding.PolicyReference.URI
+ transport := binding.Binding.Transport
+
+ if transport == "http://schemas.xmlsoap.org/soap/http" {
+ if policy, ok := policies[policyName]; ok {
+ bindingName := binding.Name
+ specVersion := binding.Operation.Operation.SoapAction
+
+ if specVersion == trust13Spec {
+ bindings[bindingName] = wsEndpointData{Trust13, policy}
+ } else if specVersion == trust2005Spec {
+ bindings[bindingName] = wsEndpointData{Trust2005, policy}
+ } else {
+ return nil, errors.New("found unknown spec version in mex document")
+ }
+ }
+ }
+ }
+ return bindings, nil
+}
+
+func endpoints(defs Definitions, bindings map[string]wsEndpointData) (userPass, windows Endpoint, err error) {
+ for _, port := range defs.Service.Port {
+ bindingName := port.Binding
+
+ index := strings.Index(bindingName, ":")
+ if index != -1 {
+ bindingName = bindingName[index+1:]
+ }
+
+ if binding, ok := bindings[bindingName]; ok {
+ url := strings.TrimSpace(port.EndpointReference.Address.Text)
+ if url == "" {
+ return Endpoint{}, Endpoint{}, fmt.Errorf("MexDocument cannot have blank URL endpoint")
+ }
+ if binding.Version == TrustUnknown {
+ return Endpoint{}, Endpoint{}, fmt.Errorf("endpoint version unknown")
+ }
+ endpoint := Endpoint{Version: binding.Version, URL: url}
+
+ switch binding.EndpointType {
+ case etUsernamePassword:
+ updateEndpoint(&userPass, endpoint)
+ case etWindowsTransport:
+ updateEndpoint(&windows, endpoint)
+ default:
+ return Endpoint{}, Endpoint{}, errors.New("found unknown port type in MEX document")
+ }
+ }
+ }
+ return userPass, windows, nil
+}
@@ -0,0 +1,136 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+/*
+Package wstrust provides a client for communicating with a WSTrust (https://en.wikipedia.org/wiki/WS-Trust#:~:text=WS%2DTrust%20is%20a%20WS,in%20a%20secure%20message%20exchange.)
+for the purposes of extracting metadata from the service. This data can be used to acquire
+tokens using the accesstokens.Client.GetAccessTokenFromSamlGrant() call.
+*/
+package wstrust
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/internal/grant"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/wstrust/defs"
+)
+
// xmlCaller is the subset of the transport client used here: a plain XML GET
// (XMLCall) and a SOAP POST with an action header (SOAPCall).
type xmlCaller interface {
	XMLCall(ctx context.Context, endpoint string, headers http.Header, qv url.Values, resp interface{}) error
	SOAPCall(ctx context.Context, endpoint, action string, headers http.Header, qv url.Values, body string, resp interface{}) error
}

// SamlTokenInfo carries a raw SAML assertion and the grant type it maps to.
type SamlTokenInfo struct {
	AssertionType string // Should be either constants SAMLV1Grant or SAMLV2Grant.
	Assertion     string
}

// Client represents the REST calls to get tokens from token generator backends.
type Client struct {
	// Comm provides the HTTP transport client.
	Comm xmlCaller
}
+
// newFromDef is an indirection over defs.NewFromDef so tests can substitute a
// fake. TODO(msal): This exists because Mex cannot be exercised without a
// realistic Definitions document (policies() finds no policy otherwise).
// That is easy enough to fix in test data, but Definitions is defined with
// built-in structs; it needs to be pulled apart before this hack can go.
var newFromDef = defs.NewFromDef
+
+// Mex provides metadata about a wstrust service.
+func (c Client) Mex(ctx context.Context, federationMetadataURL string) (defs.MexDocument, error) {
+ resp := defs.Definitions{}
+ err := c.Comm.XMLCall(
+ ctx,
+ federationMetadataURL,
+ http.Header{},
+ nil,
+ &resp,
+ )
+ if err != nil {
+ return defs.MexDocument{}, err
+ }
+
+ return newFromDef(resp)
+}
+
const (
	// SoapActionDefault is the SOAPAction header sent for WS-Trust 1.3 RST/Issue requests.
	SoapActionDefault = "http://docs.oasis-open.org/ws-sx/ws-trust/200512/RST/Issue"

	// Note: Commented out because this action is not supported. It was in the original code
	// but only used in a switch where it errored. Since there was only one value, a default
	// worked better. However, buildTokenRequestMessage() had 2005 support. I'm not actually
	// sure what's going on here. It looks like we have half support. For now this is here just
	// for documentation purposes in case we are going to add support.
	//
	// SoapActionWSTrust2005 = "http://schemas.xmlsoap.org/ws/2005/02/trust/RST/Issue"
)
+
// SAMLTokenInfo provides SAML information that is used to generate a SAML token.
//
// It builds the WS-Trust RST message matching the auth type (Windows
// integrated or username/password), POSTs it to the endpoint with the
// WS-Trust 1.3 SOAPAction, and extracts the SAML assertion from the
// response. Trust2005 endpoints are rejected here even though request
// building supports them — see the note on SoapActionDefault.
func (c Client) SAMLTokenInfo(ctx context.Context, authParameters authority.AuthParams, cloudAudienceURN string, endpoint defs.Endpoint) (SamlTokenInfo, error) {
	var wsTrustRequestMessage string
	var err error

	switch authParameters.AuthorizationType {
	case authority.ATWindowsIntegrated:
		wsTrustRequestMessage, err = endpoint.BuildTokenRequestMessageWIA(cloudAudienceURN)
		if err != nil {
			return SamlTokenInfo{}, err
		}
	case authority.ATUsernamePassword:
		wsTrustRequestMessage, err = endpoint.BuildTokenRequestMessageUsernamePassword(
			cloudAudienceURN, authParameters.Username, authParameters.Password)
		if err != nil {
			return SamlTokenInfo{}, err
		}
	default:
		return SamlTokenInfo{}, fmt.Errorf("unknown auth type %v", authParameters.AuthorizationType)
	}

	var soapAction string
	switch endpoint.Version {
	case defs.Trust13:
		soapAction = SoapActionDefault
	case defs.Trust2005:
		return SamlTokenInfo{}, errors.New("WS Trust 2005 support is not implemented")
	default:
		return SamlTokenInfo{}, fmt.Errorf("the SOAP endpoint for a wstrust call had an invalid version: %v", endpoint.Version)
	}

	resp := defs.SAMLDefinitions{}
	err = c.Comm.SOAPCall(ctx, endpoint.URL, soapAction, http.Header{}, nil, wsTrustRequestMessage, &resp)
	if err != nil {
		return SamlTokenInfo{}, err
	}

	return c.samlAssertion(resp)
}
+
// SAML assertion namespace URIs, used to tell v1 and v2 assertions apart.
const (
	samlv1Assertion = "urn:oasis:names:tc:SAML:1.0:assertion"
	samlv2Assertion = "urn:oasis:names:tc:SAML:2.0:assertion"
)
+
+func (c Client) samlAssertion(def defs.SAMLDefinitions) (SamlTokenInfo, error) {
+ for _, tokenResponse := range def.Body.RequestSecurityTokenResponseCollection.RequestSecurityTokenResponse {
+ token := tokenResponse.RequestedSecurityToken
+ if token.Assertion.XMLName.Local != "" {
+ assertion := token.AssertionRawXML
+
+ samlVersion := token.Assertion.Saml
+ switch samlVersion {
+ case samlv1Assertion:
+ return SamlTokenInfo{AssertionType: grant.SAMLV1, Assertion: assertion}, nil
+ case samlv2Assertion:
+ return SamlTokenInfo{AssertionType: grant.SAMLV2, Assertion: assertion}, nil
+ }
+ return SamlTokenInfo{}, fmt.Errorf("couldn't parse SAML assertion, version unknown: %q", samlVersion)
+ }
+ }
+ return SamlTokenInfo{}, errors.New("unknown WS-Trust version")
+}
@@ -0,0 +1,149 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+// TODO(msal): Write some tests. The original code this came from didn't have tests and I'm too
+// tired at this point to do it. It, like many other *Manager code I found was broken because
+// they didn't have mutex protection.
+
+package oauth
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strings"
+ "sync"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+)
+
// ADFS is an active directory federation service authority type.
const ADFS = "ADFS"

// cacheEntry is one resolved-endpoints record. For ADFS authorities,
// ValidForDomainsInList tracks which UPN domains the entry has been
// validated against.
type cacheEntry struct {
	Endpoints             authority.Endpoints
	ValidForDomainsInList map[string]bool
}
+
+func createcacheEntry(endpoints authority.Endpoints) cacheEntry {
+ return cacheEntry{endpoints, map[string]bool{}}
+}
+
// AuthorityEndpoint retrieves endpoints from an authority for auth and token acquisition.
type authorityEndpoint struct {
	rest *ops.REST

	// mu guards cache; this struct must not be copied.
	mu    sync.Mutex
	cache map[string]cacheEntry
}
+
+// newAuthorityEndpoint is the constructor for AuthorityEndpoint.
+func newAuthorityEndpoint(rest *ops.REST) *authorityEndpoint {
+ m := &authorityEndpoint{rest: rest, cache: map[string]cacheEntry{}}
+ return m
+}
+
+// ResolveEndpoints gets the authorization and token endpoints and creates an AuthorityEndpoints instance
+func (m *authorityEndpoint) ResolveEndpoints(ctx context.Context, authorityInfo authority.Info, userPrincipalName string) (authority.Endpoints, error) {
+
+ if endpoints, found := m.cachedEndpoints(authorityInfo, userPrincipalName); found {
+ return endpoints, nil
+ }
+
+ endpoint, err := m.openIDConfigurationEndpoint(ctx, authorityInfo, userPrincipalName)
+ if err != nil {
+ return authority.Endpoints{}, err
+ }
+
+ resp, err := m.rest.Authority().GetTenantDiscoveryResponse(ctx, endpoint)
+ if err != nil {
+ return authority.Endpoints{}, err
+ }
+ if err := resp.Validate(); err != nil {
+ return authority.Endpoints{}, fmt.Errorf("ResolveEndpoints(): %w", err)
+ }
+
+ tenant := authorityInfo.Tenant
+
+ endpoints := authority.NewEndpoints(
+ strings.Replace(resp.AuthorizationEndpoint, "{tenant}", tenant, -1),
+ strings.Replace(resp.TokenEndpoint, "{tenant}", tenant, -1),
+ strings.Replace(resp.Issuer, "{tenant}", tenant, -1),
+ authorityInfo.Host)
+
+ m.addCachedEndpoints(authorityInfo, userPrincipalName, endpoints)
+
+ return endpoints, nil
+}
+
+// cachedEndpoints returns a the cached endpoints if they exists. If not, we return false.
+func (m *authorityEndpoint) cachedEndpoints(authorityInfo authority.Info, userPrincipalName string) (authority.Endpoints, bool) {
+ m.mu.Lock()
+ defer m.mu.Unlock()
+
+ if cacheEntry, ok := m.cache[authorityInfo.CanonicalAuthorityURI]; ok {
+ if authorityInfo.AuthorityType == ADFS {
+ domain, err := adfsDomainFromUpn(userPrincipalName)
+ if err == nil {
+ if _, ok := cacheEntry.ValidForDomainsInList[domain]; ok {
+ return cacheEntry.Endpoints, true
+ }
+ }
+ }
+ return cacheEntry.Endpoints, true
+ }
+ return authority.Endpoints{}, false
+}
+
// addCachedEndpoints stores endpoints under the authority's canonical URI.
// For ADFS, the domain of userPrincipalName is marked validated, and domains
// already validated by a previous entry are carried forward.
func (m *authorityEndpoint) addCachedEndpoints(authorityInfo authority.Info, userPrincipalName string, endpoints authority.Endpoints) {
	m.mu.Lock()
	defer m.mu.Unlock()

	updatedCacheEntry := createcacheEntry(endpoints)

	if authorityInfo.AuthorityType == ADFS {
		// Since we're here, we've made a call to the backend. We want to ensure we're caching
		// the latest values from the server.
		if cacheEntry, ok := m.cache[authorityInfo.CanonicalAuthorityURI]; ok {
			for k := range cacheEntry.ValidForDomainsInList {
				updatedCacheEntry.ValidForDomainsInList[k] = true
			}
		}
		// A UPN without an "@" simply contributes no validated domain.
		domain, err := adfsDomainFromUpn(userPrincipalName)
		if err == nil {
			updatedCacheEntry.ValidForDomainsInList[domain] = true
		}
	}

	m.cache[authorityInfo.CanonicalAuthorityURI] = updatedCacheEntry
}
+
+func (m *authorityEndpoint) openIDConfigurationEndpoint(ctx context.Context, authorityInfo authority.Info, userPrincipalName string) (string, error) {
+ if authorityInfo.Tenant == "adfs" {
+ return fmt.Sprintf("https://%s/adfs/.well-known/openid-configuration", authorityInfo.Host), nil
+ } else if authorityInfo.ValidateAuthority && !authority.TrustedHost(authorityInfo.Host) {
+ resp, err := m.rest.Authority().AADInstanceDiscovery(ctx, authorityInfo)
+ if err != nil {
+ return "", err
+ }
+ return resp.TenantDiscoveryEndpoint, nil
+ } else if authorityInfo.Region != "" {
+ resp, err := m.rest.Authority().AADInstanceDiscovery(ctx, authorityInfo)
+ if err != nil {
+ return "", err
+ }
+ return resp.TenantDiscoveryEndpoint, nil
+
+ }
+
+ return authorityInfo.CanonicalAuthorityURI + "v2.0/.well-known/openid-configuration", nil
+}
+
// adfsDomainFromUpn extracts the domain portion of a user principal name
// ("user@domain" -> "domain"). With multiple "@" signs, the segment after the
// first one is returned; a UPN without any "@" is an error.
func adfsDomainFromUpn(userPrincipalName string) (string, error) {
	if parts := strings.Split(userPrincipalName, "@"); len(parts) >= 2 {
		return parts[1], nil
	}
	return "", errors.New("no @ present in user principal name")
}
@@ -0,0 +1,52 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package options
+
+import (
+ "errors"
+ "fmt"
+)
+
+// CallOption implements an optional argument to a method call. See
+// https://blog.devgenius.io/go-call-option-that-can-be-used-with-multiple-methods-6c81734f3dbe
+// for an explanation of the usage pattern.
+type CallOption interface {
+ Do(any) error
+ callOption()
+}
+
+// ApplyOptions applies all the callOptions to options. options must be a pointer to a struct and
+// callOptions must be a list of objects that implement CallOption.
+func ApplyOptions[O, C any](options O, callOptions []C) error {
+ for _, o := range callOptions {
+ if t, ok := any(o).(CallOption); !ok {
+ return fmt.Errorf("unexpected option type %T", o)
+ } else if err := t.Do(options); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// NewCallOption returns a new CallOption whose Do() method calls function "f".
+func NewCallOption(f func(any) error) CallOption {
+ if f == nil {
+ // This isn't a practical concern because only an MSAL maintainer can get
+ // us here, by implementing a do-nothing option. But if someone does that,
+ // the below ensures the method invoked with the option returns an error.
+ return callOption(func(any) error {
+ return errors.New("invalid option: missing implementation")
+ })
+ }
+ return callOption(f)
+}
+
+// callOption is an adapter for a function to a CallOption
+type callOption func(any) error
+
+func (c callOption) Do(a any) error {
+ return c(a)
+}
+
+func (callOption) callOption() {}
@@ -0,0 +1,72 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+package shared
+
+import (
+ "net/http"
+ "reflect"
+ "strings"
+)
+
const (
	// CacheKeySeparator is used in creating the keys of the cache.
	CacheKeySeparator = "-"
)

// Account is a cached user account; the JSON tags mirror the MSAL token
// cache schema.
type Account struct {
	HomeAccountID     string `json:"home_account_id,omitempty"`
	Environment       string `json:"environment,omitempty"`
	Realm             string `json:"realm,omitempty"`
	LocalAccountID    string `json:"local_account_id,omitempty"`
	AuthorityType     string `json:"authority_type,omitempty"`
	PreferredUsername string `json:"username,omitempty"`
	GivenName         string `json:"given_name,omitempty"`
	FamilyName        string `json:"family_name,omitempty"`
	MiddleName        string `json:"middle_name,omitempty"`
	Name              string `json:"name,omitempty"`
	AlternativeID     string `json:"alternative_account_id,omitempty"`
	RawClientInfo     string `json:"client_info,omitempty"`
	UserAssertionHash string `json:"user_assertion_hash,omitempty"`

	AdditionalFields map[string]interface{}
}

// NewAccount creates an account.
func NewAccount(homeAccountID, env, realm, localAccountID, authorityType, username string) Account {
	var acc Account
	acc.HomeAccountID = homeAccountID
	acc.Environment = env
	acc.Realm = realm
	acc.LocalAccountID = localAccountID
	acc.AuthorityType = authorityType
	acc.PreferredUsername = username
	return acc
}

// Key creates the key for storing accounts in the cache: the lowercased
// home-account-ID/environment/realm triple joined by CacheKeySeparator.
func (acc Account) Key() string {
	return strings.ToLower(acc.HomeAccountID + CacheKeySeparator + acc.Environment + CacheKeySeparator + acc.Realm)
}

// IsZero checks the zero value of account. A non-nil but empty map or slice
// field still counts as zero.
func (acc Account) IsZero() bool {
	v := reflect.ValueOf(acc)
	for i := 0; i < v.NumField(); i++ {
		f := v.Field(i)
		if f.IsZero() {
			continue
		}
		if (f.Kind() == reflect.Map || f.Kind() == reflect.Slice) && f.Len() == 0 {
			continue
		}
		return false
	}
	return true
}
+
// DefaultClient is our default shared HTTP client.
// NOTE(review): no Timeout is set, so requests can block indefinitely unless
// callers pass a cancellable context — confirm this is intentional.
var DefaultClient = &http.Client{}
@@ -0,0 +1,8 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+// Package version keeps the version number of the client package.
+package version
+
+// Version is the version of this client package that is communicated to the server.
+// Update this constant when cutting a new release.
+const Version = "1.2.0"
@@ -0,0 +1,756 @@
+// Copyright (c) Microsoft Corporation.
+// Licensed under the MIT license.
+
+/*
+Package public provides a client for authentication of "public" applications. A "public"
+application is defined as an app that runs on client devices (android, ios, windows, linux, ...).
+These devices are "untrusted" and access resources via web APIs that must authenticate.
+*/
+package public
+
+/*
+Design note:
+
+public.Client uses client.Base as an embedded type. client.Base statically assigns its attributes
+during creation. As it doesn't have any pointers in it, anything borrowed from it, such as
+Base.AuthParams is a copy that is free to be manipulated here.
+*/
+
+// TODO(msal): This should have example code for each method on client using Go's example doc framework.
+// base usage details should be included in the package documentation.
+
+import (
+ "context"
+ "crypto/rand"
+ "crypto/sha256"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "net/url"
+ "reflect"
+ "strconv"
+
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/cache"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/base"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/local"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/accesstokens"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/oauth/ops/authority"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/options"
+ "github.com/AzureAD/microsoft-authentication-library-for-go/apps/internal/shared"
+ "github.com/google/uuid"
+ "github.com/pkg/browser"
+)
+
+// AuthResult contains the results of one token acquisition operation.
+// For details see https://aka.ms/msal-net-authenticationresult
+type AuthResult = base.AuthResult
+
+type AuthenticationScheme = authority.AuthenticationScheme
+
+type Account = shared.Account
+
+var errNoAccount = errors.New("no account was specified with public.WithSilentAccount(), or the specified account is invalid")
+
+// clientOptions configures the Client's behavior.
+type clientOptions struct {
+	accessor                 cache.ExportReplace // external cache implementation, nil for in-memory only
+	authority                string              // must be an https URL; see validate
+	capabilities             []string            // client capabilities such as "CP1"
+	disableInstanceDiscovery bool                // true disables authority validation (private clouds)
+	httpClient               ops.HTTPClient
+}
+
+// validate checks that the configured authority is a URL with an https scheme.
+// An empty authority also fails: url.Parse("") succeeds but yields an empty
+// scheme, tripping the https check.
+// NOTE(review): the error strings start with a capital letter, which
+// staticcheck (ST1005) flags; left as-is since callers may match on the text.
+func (p *clientOptions) validate() error {
+	u, err := url.Parse(p.authority)
+	if err != nil {
+		return fmt.Errorf("Authority options cannot be URL parsed: %w", err)
+	}
+	if u.Scheme != "https" {
+		return fmt.Errorf("Authority(%s) did not start with https://", u.String())
+	}
+	return nil
+}
+
+// Option is an optional argument to the New constructor.
+type Option func(o *clientOptions)
+
+// WithAuthority allows for a custom authority to be set. This must be a valid https url.
+func WithAuthority(authority string) Option {
+	return func(o *clientOptions) {
+		o.authority = authority
+	}
+}
+
+// WithCache provides an accessor that will read and write authentication data to an externally managed cache.
+func WithCache(accessor cache.ExportReplace) Option {
+	return func(o *clientOptions) {
+		o.accessor = accessor
+	}
+}
+
+// WithClientCapabilities allows configuring one or more client capabilities such as "CP1"
+func WithClientCapabilities(capabilities []string) Option {
+	return func(o *clientOptions) {
+		// there's no danger of sharing the slice's underlying memory with the application because
+		// this slice is simply passed to base.WithClientCapabilities, which copies its data
+		o.capabilities = capabilities
+	}
+}
+
+// WithHTTPClient allows for a custom HTTP client to be set.
+func WithHTTPClient(httpClient ops.HTTPClient) Option {
+	return func(o *clientOptions) {
+		o.httpClient = httpClient
+	}
+}
+
+// WithInstanceDiscovery set to false to disable authority validation (to support private cloud scenarios)
+func WithInstanceDiscovery(enabled bool) Option {
+	return func(o *clientOptions) {
+		// stored inverted so the zero value of clientOptions keeps discovery enabled
+		o.disableInstanceDiscovery = !enabled
+	}
+}
+
+// Client is a representation of authentication client for public applications as defined in the
+// package doc. For more information, visit https://docs.microsoft.com/azure/active-directory/develop/msal-client-applications.
+type Client struct {
+	base base.Client
+}
+
+// New is the constructor for Client. The authority defaults to the Azure
+// public cloud and the HTTP client to shared.DefaultClient unless overridden
+// via options. Options are applied in order, then validated.
+func New(clientID string, options ...Option) (Client, error) {
+	opts := clientOptions{
+		authority:  base.AuthorityPublicCloud,
+		httpClient: shared.DefaultClient,
+	}
+
+	for _, o := range options {
+		o(&opts)
+	}
+	if err := opts.validate(); err != nil {
+		return Client{}, err
+	}
+
+	// note: the local variable deliberately shadows the imported base package here
+	base, err := base.New(clientID, opts.authority, oauth.New(opts.httpClient), base.WithCacheAccessor(opts.accessor), base.WithClientCapabilities(opts.capabilities), base.WithInstanceDiscovery(!opts.disableInstanceDiscovery))
+	if err != nil {
+		return Client{}, err
+	}
+	return Client{base}, nil
+}
+
+// authCodeURLOptions contains options for AuthCodeURL
+type authCodeURLOptions struct {
+	claims, loginHint, tenantID, domainHint string
+}
+
+// AuthCodeURLOption is implemented by options for AuthCodeURL
+type AuthCodeURLOption interface {
+	authCodeURLOption()
+}
+
+// AuthCodeURL creates a URL used to acquire an authorization code.
+//
+// Options: [WithClaims], [WithDomainHint], [WithLoginHint], [WithTenantID]
+func (pca Client) AuthCodeURL(ctx context.Context, clientID, redirectURI string, scopes []string, opts ...AuthCodeURLOption) (string, error) {
+	o := authCodeURLOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return "", err
+	}
+	// AuthParams is a value (see the design note at the top of the file), so
+	// the tenant-scoped copy can be mutated freely before building the URL.
+	ap, err := pca.base.AuthParams.WithTenant(o.tenantID)
+	if err != nil {
+		return "", err
+	}
+	ap.Claims = o.claims
+	ap.LoginHint = o.loginHint
+	ap.DomainHint = o.domainHint
+	return pca.base.AuthCodeURL(ctx, clientID, redirectURI, scopes, ap)
+}
+
+// WithClaims sets additional claims to request for the token, such as those required by conditional access policies.
+// Use this option when Azure AD returned a claims challenge for a prior request. The argument must be decoded.
+// This option is valid for any token acquisition method.
+func WithClaims(claims string) interface {
+	AcquireByAuthCodeOption
+	AcquireByDeviceCodeOption
+	AcquireByUsernamePasswordOption
+	AcquireInteractiveOption
+	AcquireSilentOption
+	AuthCodeURLOption
+	options.CallOption
+} {
+	// The anonymous struct embeds every marker interface above so this single
+	// value is accepted by each listed method's variadic options; the
+	// CallOption carries the function options.ApplyOptions dispatches on the
+	// concrete options type to set the claims field.
+	return struct {
+		AcquireByAuthCodeOption
+		AcquireByDeviceCodeOption
+		AcquireByUsernamePasswordOption
+		AcquireInteractiveOption
+		AcquireSilentOption
+		AuthCodeURLOption
+		options.CallOption
+	}{
+		CallOption: options.NewCallOption(
+			func(a any) error {
+				switch t := a.(type) {
+				case *acquireTokenByAuthCodeOptions:
+					t.claims = claims
+				case *acquireTokenByDeviceCodeOptions:
+					t.claims = claims
+				case *acquireTokenByUsernamePasswordOptions:
+					t.claims = claims
+				case *acquireTokenSilentOptions:
+					t.claims = claims
+				case *authCodeURLOptions:
+					t.claims = claims
+				case *interactiveAuthOptions:
+					t.claims = claims
+				default:
+					return fmt.Errorf("unexpected options type %T", a)
+				}
+				return nil
+			},
+		),
+	}
+}
+
+// WithAuthenticationScheme is an extensibility mechanism designed to be used only by Azure Arc for proof of possession access tokens.
+func WithAuthenticationScheme(authnScheme AuthenticationScheme) interface {
+	AcquireSilentOption
+	AcquireInteractiveOption
+	AcquireByUsernamePasswordOption
+	options.CallOption
+} {
+	// Embedded marker interfaces make this value usable with the silent,
+	// interactive, and username/password flows (see WithClaims for the pattern).
+	return struct {
+		AcquireSilentOption
+		AcquireInteractiveOption
+		AcquireByUsernamePasswordOption
+		options.CallOption
+	}{
+		CallOption: options.NewCallOption(
+			func(a any) error {
+				switch t := a.(type) {
+				case *acquireTokenSilentOptions:
+					t.authnScheme = authnScheme
+				case *interactiveAuthOptions:
+					t.authnScheme = authnScheme
+				case *acquireTokenByUsernamePasswordOptions:
+					t.authnScheme = authnScheme
+				default:
+					return fmt.Errorf("unexpected options type %T", a)
+				}
+				return nil
+			},
+		),
+	}
+}
+
+// WithTenantID specifies a tenant for a single authentication. It may be different than the tenant set in [New] by [WithAuthority].
+// This option is valid for any token acquisition method.
+func WithTenantID(tenantID string) interface {
+	AcquireByAuthCodeOption
+	AcquireByDeviceCodeOption
+	AcquireByUsernamePasswordOption
+	AcquireInteractiveOption
+	AcquireSilentOption
+	AuthCodeURLOption
+	options.CallOption
+} {
+	// Same embedded-marker-interface pattern as WithClaims: one value usable
+	// with every listed acquisition method.
+	return struct {
+		AcquireByAuthCodeOption
+		AcquireByDeviceCodeOption
+		AcquireByUsernamePasswordOption
+		AcquireInteractiveOption
+		AcquireSilentOption
+		AuthCodeURLOption
+		options.CallOption
+	}{
+		CallOption: options.NewCallOption(
+			func(a any) error {
+				switch t := a.(type) {
+				case *acquireTokenByAuthCodeOptions:
+					t.tenantID = tenantID
+				case *acquireTokenByDeviceCodeOptions:
+					t.tenantID = tenantID
+				case *acquireTokenByUsernamePasswordOptions:
+					t.tenantID = tenantID
+				case *acquireTokenSilentOptions:
+					t.tenantID = tenantID
+				case *authCodeURLOptions:
+					t.tenantID = tenantID
+				case *interactiveAuthOptions:
+					t.tenantID = tenantID
+				default:
+					return fmt.Errorf("unexpected options type %T", a)
+				}
+				return nil
+			},
+		),
+	}
+}
+
+// acquireTokenSilentOptions are all the optional settings to an AcquireTokenSilent() call.
+// These are set by using various AcquireTokenSilentOption functions.
+type acquireTokenSilentOptions struct {
+	account          Account // required; AcquireTokenSilent rejects a zero account
+	claims, tenantID string
+	authnScheme      AuthenticationScheme
+}
+
+// AcquireSilentOption is implemented by options for AcquireTokenSilent
+type AcquireSilentOption interface {
+	acquireSilentOption()
+}
+
+// WithSilentAccount uses the passed account during an AcquireTokenSilent() call.
+func WithSilentAccount(account Account) interface {
+	AcquireSilentOption
+	options.CallOption
+} {
+	return struct {
+		AcquireSilentOption
+		options.CallOption
+	}{
+		CallOption: options.NewCallOption(
+			func(a any) error {
+				switch t := a.(type) {
+				case *acquireTokenSilentOptions:
+					t.account = account
+				default:
+					return fmt.Errorf("unexpected options type %T", a)
+				}
+				return nil
+			},
+		),
+	}
+}
+
+// AcquireTokenSilent acquires a token from either the cache or using a refresh token.
+// It returns errNoAccount when no account was supplied via [WithSilentAccount].
+//
+// Options: [WithClaims], [WithSilentAccount], [WithTenantID]
+func (pca Client) AcquireTokenSilent(ctx context.Context, scopes []string, opts ...AcquireSilentOption) (AuthResult, error) {
+	o := acquireTokenSilentOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return AuthResult{}, err
+	}
+	// an account is required to find user tokens in the cache
+	if reflect.ValueOf(o.account).IsZero() {
+		return AuthResult{}, errNoAccount
+	}
+
+	silentParameters := base.AcquireTokenSilentParameters{
+		Scopes:      scopes,
+		Account:     o.account,
+		Claims:      o.claims,
+		RequestType: accesstokens.ATPublic,
+		IsAppCache:  false, // user token cache, not the app (confidential client) cache
+		TenantID:    o.tenantID,
+		AuthnScheme: o.authnScheme,
+	}
+
+	return pca.base.AcquireTokenSilent(ctx, silentParameters)
+}
+
+// acquireTokenByUsernamePasswordOptions contains optional configuration for AcquireTokenByUsernamePassword
+type acquireTokenByUsernamePasswordOptions struct {
+	claims, tenantID string
+	authnScheme      AuthenticationScheme
+}
+
+// AcquireByUsernamePasswordOption is implemented by options for AcquireTokenByUsernamePassword
+type AcquireByUsernamePasswordOption interface {
+	acquireByUsernamePasswordOption()
+}
+
+// AcquireTokenByUsernamePassword acquires a security token from the authority, via Username/Password Authentication.
+// NOTE: this flow is NOT recommended.
+//
+// Options: [WithClaims], [WithTenantID]
+func (pca Client) AcquireTokenByUsernamePassword(ctx context.Context, scopes []string, username, password string, opts ...AcquireByUsernamePasswordOption) (AuthResult, error) {
+	o := acquireTokenByUsernamePasswordOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return AuthResult{}, err
+	}
+	authParams, err := pca.base.AuthParams.WithTenant(o.tenantID)
+	if err != nil {
+		return AuthResult{}, err
+	}
+	authParams.Scopes = scopes
+	authParams.AuthorizationType = authority.ATUsernamePassword
+	authParams.Claims = o.claims
+	authParams.Username = username
+	authParams.Password = password
+	// only override the scheme when one was explicitly provided
+	if o.authnScheme != nil {
+		authParams.AuthnScheme = o.authnScheme
+	}
+
+	token, err := pca.base.Token.UsernamePassword(ctx, authParams)
+	if err != nil {
+		return AuthResult{}, err
+	}
+	return pca.base.AuthResultFromToken(ctx, authParams, token, true)
+}
+
+type DeviceCodeResult = accesstokens.DeviceCodeResult
+
+// DeviceCode provides the results of the device code flows first stage (containing the code)
+// that must be entered on the second device and provides a method to retrieve the AuthenticationResult
+// once that code has been entered and verified.
+type DeviceCode struct {
+	// Result holds the information about the device code (such as the code).
+	Result DeviceCodeResult
+
+	authParams authority.AuthParams
+	client     Client
+	dc         oauth.DeviceCode
+}
+
+// AuthenticationResult retrieves the AuthenticationResult once the user enters the code
+// on the second device. Until then it blocks until the .AcquireTokenByDeviceCode() context
+// is cancelled or the token expires.
+func (d DeviceCode) AuthenticationResult(ctx context.Context) (AuthResult, error) {
+	token, err := d.dc.Token(ctx)
+	if err != nil {
+		return AuthResult{}, err
+	}
+	return d.client.base.AuthResultFromToken(ctx, d.authParams, token, true)
+}
+
+// acquireTokenByDeviceCodeOptions contains optional configuration for AcquireTokenByDeviceCode
+type acquireTokenByDeviceCodeOptions struct {
+	claims, tenantID string
+}
+
+// AcquireByDeviceCodeOption is implemented by options for AcquireTokenByDeviceCode
+type AcquireByDeviceCodeOption interface {
+	acquireByDeviceCodeOptions()
+}
+
+// AcquireTokenByDeviceCode acquires a security token from the authority, by acquiring a device code and using that to acquire the token.
+// The returned DeviceCode carries the code to show the user; call its
+// AuthenticationResult method to block until the user completes sign-in.
+//
+// Options: [WithClaims], [WithTenantID]
+func (pca Client) AcquireTokenByDeviceCode(ctx context.Context, scopes []string, opts ...AcquireByDeviceCodeOption) (DeviceCode, error) {
+	o := acquireTokenByDeviceCodeOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return DeviceCode{}, err
+	}
+	authParams, err := pca.base.AuthParams.WithTenant(o.tenantID)
+	if err != nil {
+		return DeviceCode{}, err
+	}
+	authParams.Scopes = scopes
+	authParams.AuthorizationType = authority.ATDeviceCode
+	authParams.Claims = o.claims
+
+	dc, err := pca.base.Token.DeviceCode(ctx, authParams)
+	if err != nil {
+		return DeviceCode{}, err
+	}
+
+	return DeviceCode{Result: dc.Result, authParams: authParams, client: pca, dc: dc}, nil
+}
+
+// acquireTokenByAuthCodeOptions contains the optional parameters used to acquire an access token using the authorization code flow.
+type acquireTokenByAuthCodeOptions struct {
+	challenge, claims, tenantID string
+}
+
+// AcquireByAuthCodeOption is implemented by options for AcquireTokenByAuthCode
+type AcquireByAuthCodeOption interface {
+	acquireByAuthCodeOption()
+}
+
+// WithChallenge allows you to provide a code for the .AcquireTokenByAuthCode() call.
+func WithChallenge(challenge string) interface {
+	AcquireByAuthCodeOption
+	options.CallOption
+} {
+	return struct {
+		AcquireByAuthCodeOption
+		options.CallOption
+	}{
+		CallOption: options.NewCallOption(
+			func(a any) error {
+				switch t := a.(type) {
+				case *acquireTokenByAuthCodeOptions:
+					t.challenge = challenge
+				default:
+					return fmt.Errorf("unexpected options type %T", a)
+				}
+				return nil
+			},
+		),
+	}
+}
+
+// AcquireTokenByAuthCode is a request to acquire a security token from the authority, using an authorization code.
+// The specified redirect URI must be the same URI that was used when the authorization code was requested.
+//
+// Options: [WithChallenge], [WithClaims], [WithTenantID]
+func (pca Client) AcquireTokenByAuthCode(ctx context.Context, code string, redirectURI string, scopes []string, opts ...AcquireByAuthCodeOption) (AuthResult, error) {
+	o := acquireTokenByAuthCodeOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return AuthResult{}, err
+	}
+
+	params := base.AcquireTokenAuthCodeParameters{
+		Scopes:      scopes,
+		Code:        code,
+		Challenge:   o.challenge, // PKCE code verifier, if one was used to get the code
+		Claims:      o.claims,
+		AppType:     accesstokens.ATPublic,
+		RedirectURI: redirectURI,
+		TenantID:    o.tenantID,
+	}
+
+	return pca.base.AcquireTokenByAuthCode(ctx, params)
+}
+
+// Accounts gets all the accounts in the token cache.
+// If there are no accounts in the cache the returned slice is empty.
+func (pca Client) Accounts(ctx context.Context) ([]Account, error) {
+	return pca.base.AllAccounts(ctx)
+}
+
+// RemoveAccount signs the account out and forgets account from token cache.
+func (pca Client) RemoveAccount(ctx context.Context, account Account) error {
+	return pca.base.RemoveAccount(ctx, account)
+}
+
+// interactiveAuthOptions contains the optional parameters used to acquire an access token for interactive auth code flow.
+type interactiveAuthOptions struct {
+	claims, domainHint, loginHint, redirectURI, tenantID string
+	// openURL opens the browser for login; defaults to the system browser
+	openURL     func(url string) error
+	authnScheme AuthenticationScheme
+}
+
+// AcquireInteractiveOption is implemented by options for AcquireTokenInteractive
+type AcquireInteractiveOption interface {
+	acquireInteractiveOption()
+}
+
+// WithLoginHint pre-populates the login prompt with a username.
+func WithLoginHint(username string) interface {
+	AcquireInteractiveOption
+	AuthCodeURLOption
+	options.CallOption
+} {
+	return struct {
+		AcquireInteractiveOption
+		AuthCodeURLOption
+		options.CallOption
+	}{
+		CallOption: options.NewCallOption(
+			func(a any) error {
+				switch t := a.(type) {
+				case *authCodeURLOptions:
+					t.loginHint = username
+				case *interactiveAuthOptions:
+					t.loginHint = username
+				default:
+					return fmt.Errorf("unexpected options type %T", a)
+				}
+				return nil
+			},
+		),
+	}
+}
+
+// WithDomainHint adds the IdP domain as domain_hint query parameter in the auth url.
+func WithDomainHint(domain string) interface {
+	AcquireInteractiveOption
+	AuthCodeURLOption
+	options.CallOption
+} {
+	return struct {
+		AcquireInteractiveOption
+		AuthCodeURLOption
+		options.CallOption
+	}{
+		CallOption: options.NewCallOption(
+			func(a any) error {
+				switch t := a.(type) {
+				case *authCodeURLOptions:
+					t.domainHint = domain
+				case *interactiveAuthOptions:
+					t.domainHint = domain
+				default:
+					return fmt.Errorf("unexpected options type %T", a)
+				}
+				return nil
+			},
+		),
+	}
+}
+
+// WithRedirectURI sets a port for the local server used in interactive authentication, for
+// example http://localhost:port. All URI components other than the port are ignored.
+func WithRedirectURI(redirectURI string) interface {
+	AcquireInteractiveOption
+	options.CallOption
+} {
+	return struct {
+		AcquireInteractiveOption
+		options.CallOption
+	}{
+		CallOption: options.NewCallOption(
+			func(a any) error {
+				switch t := a.(type) {
+				case *interactiveAuthOptions:
+					t.redirectURI = redirectURI
+				default:
+					return fmt.Errorf("unexpected options type %T", a)
+				}
+				return nil
+			},
+		),
+	}
+}
+
+// WithOpenURL allows you to provide a function to open the browser to complete the interactive login, instead of launching the system default browser.
+func WithOpenURL(openURL func(url string) error) interface {
+	AcquireInteractiveOption
+	options.CallOption
+} {
+	return struct {
+		AcquireInteractiveOption
+		options.CallOption
+	}{
+		CallOption: options.NewCallOption(
+			func(a any) error {
+				switch t := a.(type) {
+				case *interactiveAuthOptions:
+					t.openURL = openURL
+				default:
+					return fmt.Errorf("unexpected options type %T", a)
+				}
+				return nil
+			},
+		),
+	}
+}
+
+// AcquireTokenInteractive acquires a security token from the authority using the default web browser to select the account.
+// https://docs.microsoft.com/en-us/azure/active-directory/develop/msal-authentication-flows#interactive-and-non-interactive-authentication
+//
+// Options: [WithDomainHint], [WithLoginHint], [WithOpenURL], [WithRedirectURI], [WithTenantID]
+func (pca Client) AcquireTokenInteractive(ctx context.Context, scopes []string, opts ...AcquireInteractiveOption) (AuthResult, error) {
+	o := interactiveAuthOptions{}
+	if err := options.ApplyOptions(&o, opts); err != nil {
+		return AuthResult{}, err
+	}
+	// the code verifier is a random 32-byte sequence that's been base-64 encoded without padding.
+	// it's used to prevent MitM attacks during auth code flow, see https://tools.ietf.org/html/rfc7636
+	cv, challenge, err := codeVerifier()
+	if err != nil {
+		return AuthResult{}, err
+	}
+	// a custom redirect URI only matters for its port (see WithRedirectURI)
+	var redirectURL *url.URL
+	if o.redirectURI != "" {
+		redirectURL, err = url.Parse(o.redirectURI)
+		if err != nil {
+			return AuthResult{}, err
+		}
+	}
+	// default to launching the system browser
+	if o.openURL == nil {
+		o.openURL = browser.OpenURL
+	}
+	authParams, err := pca.base.AuthParams.WithTenant(o.tenantID)
+	if err != nil {
+		return AuthResult{}, err
+	}
+	authParams.Scopes = scopes
+	authParams.AuthorizationType = authority.ATInteractive
+	authParams.Claims = o.claims
+	authParams.CodeChallenge = challenge
+	authParams.CodeChallengeMethod = "S256"
+	authParams.LoginHint = o.loginHint
+	authParams.DomainHint = o.domainHint
+	// the state is round-tripped through the local redirect server (browserLogin)
+	authParams.State = uuid.New().String()
+	authParams.Prompt = "select_account"
+	if o.authnScheme != nil {
+		authParams.AuthnScheme = o.authnScheme
+	}
+	res, err := pca.browserLogin(ctx, redirectURL, authParams, o.openURL)
+	if err != nil {
+		return AuthResult{}, err
+	}
+	// redeem the auth code with the same redirect URI the local server used
+	authParams.Redirecturi = res.redirectURI
+
+	req, err := accesstokens.NewCodeChallengeRequest(authParams, accesstokens.ATPublic, nil, res.authCode, cv)
+	if err != nil {
+		return AuthResult{}, err
+	}
+
+	token, err := pca.base.Token.AuthCode(ctx, req)
+	if err != nil {
+		return AuthResult{}, err
+	}
+
+	return pca.base.AuthResultFromToken(ctx, authParams, token, true)
+}
+
+// interactiveAuthResult carries the outcome of the local redirect handshake:
+// the authorization code and the exact redirect URI the local server used.
+type interactiveAuthResult struct {
+	authCode    string
+	redirectURI string
+}
+
+// parsePort extracts the port number from the provided URL.
+// A nil URL or a URL without an explicit port yields 0 with no error.
+func parsePort(u *url.URL) (int, error) {
+	if u == nil || u.Port() == "" {
+		return 0, nil
+	}
+	return strconv.Atoi(u.Port())
+}
+
+// browserLogin calls openURL and waits for a user to log in.
+// It starts a loopback HTTP server on the port parsed from redirectURI
+// (port 0 presumably lets local.New pick one — confirm in internal/local),
+// opens the auth URL, and blocks until the server receives the redirect
+// or ctx is cancelled.
+func (pca Client) browserLogin(ctx context.Context, redirectURI *url.URL, params authority.AuthParams, openURL func(string) error) (interactiveAuthResult, error) {
+	// start local redirect server so login can call us back
+	port, err := parsePort(redirectURI)
+	if err != nil {
+		return interactiveAuthResult{}, err
+	}
+	srv, err := local.New(params.State, port)
+	if err != nil {
+		return interactiveAuthResult{}, err
+	}
+	defer srv.Shutdown()
+	params.Scopes = accesstokens.AppendDefaultScopes(params)
+	// the server's actual address becomes the redirect URI for the auth request
+	authURL, err := pca.base.AuthCodeURL(ctx, params.ClientID, srv.Addr, params.Scopes, params)
+	if err != nil {
+		return interactiveAuthResult{}, err
+	}
+	// open browser window so user can select credentials
+	if err := openURL(authURL); err != nil {
+		return interactiveAuthResult{}, err
+	}
+	// now wait until the logic calls us back
+	res := srv.Result(ctx)
+	if res.Err != nil {
+		return interactiveAuthResult{}, res.Err
+	}
+	return interactiveAuthResult{
+		authCode:    res.Code,
+		redirectURI: srv.Addr,
+	}, nil
+}
+
+// creates a code verifier string along with its SHA256 hash which
+// is used as the challenge when requesting an auth code.
+// used in interactive auth flow for PKCE.
+func codeVerifier() (codeVerifier string, challenge string, err error) {
+ cvBytes := make([]byte, 32)
+ if _, err = rand.Read(cvBytes); err != nil {
+ return
+ }
+ codeVerifier = base64.RawURLEncoding.EncodeToString(cvBytes)
+ // for PKCE, create a hash of the code verifier
+ cvh := sha256.Sum256([]byte(codeVerifier))
+ challenge = base64.RawURLEncoding.EncodeToString(cvh[:])
+ return
+}
@@ -0,0 +1,14 @@
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, build with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+.DS_Store
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 Johannes Kaufmann
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -0,0 +1,242 @@
+# html-to-markdown
+
+[](https://goreportcard.com/report/github.com/JohannesKaufmann/html-to-markdown)
+[](https://codecov.io/gh/JohannesKaufmann/html-to-markdown)
+
+[](http://godoc.org/github.com/JohannesKaufmann/html-to-markdown)
+
+
+
+Convert HTML into Markdown with Go. It is using an [HTML Parser](https://github.com/PuerkitoBio/goquery) to avoid the use of `regexp` as much as possible. That should prevent some [weird cases](https://stackoverflow.com/a/1732454) and allows it to be used for cases where the input is totally unknown.
+
+## Installation
+
+```
+go get github.com/JohannesKaufmann/html-to-markdown
+```
+
+## Usage
+
+```go
+import (
+ "fmt"
+ "log"
+
+ md "github.com/JohannesKaufmann/html-to-markdown"
+)
+
+converter := md.NewConverter("", true, nil)
+
+html := `<strong>Important</strong>`
+
+markdown, err := converter.ConvertString(html)
+if err != nil {
+ log.Fatal(err)
+}
+fmt.Println("md ->", markdown)
+```
+
+If you are already using [goquery](https://github.com/PuerkitoBio/goquery) you can pass a selection to `Convert`.
+
+```go
+markdown, err := converter.Convert(selec)
+```
+
+### Using it on the command line
+
+If you want to make use of `html-to-markdown` on the command line without any Go coding, check out [`html2md`](https://github.com/suntong/html2md#usage), a cli wrapper for `html-to-markdown` that has all the following options and plugins builtin.
+
+## Options
+
+The third parameter to `md.NewConverter` is `*md.Options`.
+
+For example you can change the character that is around a bold text ("`**`") to a different one (for example "`__`") by changing the value of `StrongDelimiter`.
+
+```go
+opt := &md.Options{
+ StrongDelimiter: "__", // default: **
+ // ...
+}
+converter := md.NewConverter("", true, opt)
+```
+
+For all the possible options look at [godocs](https://godoc.org/github.com/JohannesKaufmann/html-to-markdown/#Options) and for an example look at the [example](/examples/options/main.go).
+
+## Adding Rules
+
+```go
+converter.AddRules(
+ md.Rule{
+ Filter: []string{"del", "s", "strike"},
+ Replacement: func(content string, selec *goquery.Selection, opt *md.Options) *string {
+ // You need to return a pointer to a string (md.String is just a helper function).
+ // If you return nil the next function for that html element
+ // will be picked. For example you could only convert an element
+ // if it has a certain class name and fallback if not.
+ content = strings.TrimSpace(content)
+ return md.String("~" + content + "~")
+ },
+ },
+ // more rules
+)
+```
+
+For more information have a look at the example [add_rules](/examples/add_rules/main.go).
+
+## Using Plugins
+
+If you want plugins (GitHub Flavored Markdown like strikethrough, tables, ...) you can pass it to `Use`.
+
+```go
+import "github.com/JohannesKaufmann/html-to-markdown/plugin"
+
+// Use the `GitHubFlavored` plugin from the `plugin` package.
+converter.Use(plugin.GitHubFlavored())
+```
+
+Or if you only want to use the `Strikethrough` plugin. You can change the character that distinguishes
+the text that is crossed out by setting the first argument to a different value (for example "~~" instead of "~").
+
+```go
+converter.Use(plugin.Strikethrough(""))
+```
+
+For more information have a look at the example [github_flavored](/examples/github_flavored/main.go).
+
+---
+
+These are the plugins located in the [plugin folder](/plugin) which you can use by importing "github.com/JohannesKaufmann/html-to-markdown/plugin".
+
+| Name | Description |
+| --------------------- | ------------------------------------------------------------------------------------------- |
+| GitHubFlavored | GitHub's Flavored Markdown contains `TaskListItems`, `Strikethrough` and `Table`. |
+| TaskListItems | (Included in `GitHubFlavored`). Converts `<input>` checkboxes into `- [x] Task`. |
+| Strikethrough | (Included in `GitHubFlavored`). Converts `<strike>`, `<s>`, and `<del>` to the `~~` syntax. |
+| Table | (Included in `GitHubFlavored`). Convert a `<table>` into something like this... |
+| TableCompat | |
+| | |
+| VimeoEmbed | |
+| YoutubeEmbed | |
+| | |
+| ConfluenceCodeBlock | Converts `<ac:structured-macro>` elements that are used in Atlassian’s Wiki "Confluence". |
+| ConfluenceAttachments | Converts `<ri:attachment ri:filename=""/>` elements. |
+
+These are the plugins in other repositories:
+
+| Name | Description |
+| ---------------------------- | ------------------- |
+| \[Plugin Name\]\(Your Link\) | A short description |
+
+If you write a plugin, feel free to open a PR that adds your plugin to this list.
+
+## Writing Plugins
+
+Have a look at the [plugin folder](/plugin) for a reference implementation. The most basic one is [Strikethrough](/plugin/strikethrough.go).
+
+## Security
+
+This library produces markdown that is readable and can be changed by humans.
+
+Once you convert this markdown back to HTML (e.g. using [goldmark](https://github.com/yuin/goldmark) or [blackfriday](https://github.com/russross/blackfriday)) you need to be careful of malicious content.
+
+This library does NOT sanitize untrusted content. Use an HTML sanitizer such as [bluemonday](https://github.com/microcosm-cc/bluemonday) before displaying the HTML in the browser.
+
+## Other Methods
+
+[Godoc](https://godoc.org/github.com/JohannesKaufmann/html-to-markdown)
+
+### `func (c *Converter) Keep(tags ...string) *Converter`
+
+Determines which elements are to be kept and rendered as HTML.
+
+### `func (c *Converter) Remove(tags ...string) *Converter`
+
+Determines which elements are to be removed altogether i.e. converted to an empty string.
+
+## Escaping
+
+Some characters have a special meaning in markdown. For example, the character "\*" can be used for lists, emphasis and dividers. By placing a backslash before that character (e.g. "\\\*") you can "escape" it. Then the character will render as a raw "\*" without the _"markdown meaning"_ applied.
+
+But why is "escaping" even necessary?
+
+<!-- prettier-ignore -->
+```md
+Paragraph 1
+-
+Paragraph 2
+```
+
+The markdown above doesn't seem that problematic. But "Paragraph 1" (with only one hyphen below) will be recognized as a _setext heading_.
+
+```html
+<h2>Paragraph 1</h2>
+<p>Paragraph 2</p>
+```
+
+A well-placed backslash character would prevent that...
+
+<!-- prettier-ignore -->
+```md
+Paragraph 1
+\-
+Paragraph 2
+```
+
+---
+
+How to configure escaping? Depending on the `EscapeMode` option, the markdown output is going to be different.
+
+```go
+opt = &md.Options{
+ EscapeMode: "basic", // default
+}
+```
+
+Lets try it out with this HTML input:
+
+| | |
+| -------- | ----------------------------------------------------- |
+| input | `<p>fake **bold** and real <strong>bold</strong></p>` |
+| | |
+| | **With EscapeMode "basic"** |
+| output | `fake \*\*bold\*\* and real **bold**` |
+| rendered | fake \*\*bold\*\* and real **bold** |
+| | |
+| | **With EscapeMode "disabled"** |
+| output | `fake **bold** and real **bold**` |
+| rendered | fake **bold** and real **bold** |
+
+With **basic** escaping, we get some escape characters (the backslash "\\") but it renders correctly.
+
+With escaping **disabled**, the fake and real bold can't be distinguished in the markdown. That means it is both going to render as bold.
+
+---
+
+So now you know the purpose of escaping. However, if you encounter some content where the escaping breaks, you can manually disable it. But please also open an issue!
+
+## Issues
+
+If you find HTML snippets (or even full websites) that don't produce the expected results, please open an issue!
+
+## Contributing & Testing
+
+Please first discuss the change you wish to make, by opening an issue. I'm also happy to guide you to where a change is most likely needed.
+
+_Note: The outside API should not change because of backwards compatibility..._
+
+You don't have to be afraid of breaking the converter, since there are many "Golden File Tests":
+
+Add your problematic HTML snippet to one of the `input.html` files in the `testdata` folder. Then run `go test -update` and have a look at which `.golden` files changed in GIT.
+
+You can now change the internal logic and inspect what impact your change has by running `go test -update` again.
+
+_Note: Before submitting your change as a PR, make sure that you run those tests and check the files into GIT..._
+
+## Related Projects
+
+- [turndown (js)](https://github.com/domchristie/turndown), a very good library written in javascript.
+- [lunny/html2md](https://github.com/lunny/html2md), which is using [regex instead of goquery](https://stackoverflow.com/a/1732454). I came around a few edge case when using it (leaving some html comments, ...) so I wrote my own.
+
+## License
+
+This project is licensed under the terms of the MIT license.
@@ -0,0 +1,6 @@
+# Security Policy
+
+## Reporting a Vulnerability
+
+Please report (suspected) security vulnerabilities to johannes@joina.de with the subject _"Security html-to-markdown"_ and you will receive a response within 48 hours.
+
@@ -0,0 +1,393 @@
+package md
+
+import (
+ "fmt"
+ "unicode"
+
+ "regexp"
+ "strconv"
+ "strings"
+ "unicode/utf8"
+
+ "github.com/JohannesKaufmann/html-to-markdown/escape"
+ "github.com/PuerkitoBio/goquery"
+)
+
+var multipleSpacesR = regexp.MustCompile(` +`)
+
+var commonmark = []Rule{
+ {
+ Filter: []string{"ul", "ol"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ parent := selec.Parent()
+
+ // we have a nested list, were the ul/ol is inside a list item
+ // -> based on work done by @requilence from @anytypeio
+ if (parent.Is("li") || parent.Is("ul") || parent.Is("ol")) && parent.Children().Last().IsSelection(selec) {
+ // add a line break prefix if the parent's text node doesn't have it.
+ // that makes sure that every list item is on its on line
+ lastContentTextNode := strings.TrimRight(parent.Nodes[0].FirstChild.Data, " \t")
+ if !strings.HasSuffix(lastContentTextNode, "\n") {
+ content = "\n" + content
+ }
+
+ // remove empty lines between lists
+ trimmedSpaceContent := strings.TrimRight(content, " \t")
+ if strings.HasSuffix(trimmedSpaceContent, "\n") {
+ content = strings.TrimRightFunc(content, unicode.IsSpace)
+ }
+ } else {
+ content = "\n\n" + content + "\n\n"
+ }
+ return &content
+ },
+ },
+ {
+ Filter: []string{"li"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ if strings.TrimSpace(content) == "" {
+ return nil
+ }
+
+ // remove leading newlines
+ content = leadingNewlinesR.ReplaceAllString(content, "")
+ // replace trailing newlines with just a single one
+ content = trailingNewlinesR.ReplaceAllString(content, "\n")
+ // remove leading spaces
+ content = strings.TrimLeft(content, " ")
+
+ prefix := selec.AttrOr(attrListPrefix, "")
+
+ // `prefixCount` is not nessesarily the length of the empty string `prefix`
+ // but how much space is reserved for the prefixes of the siblings.
+ prefixCount, previousPrefixCounts := countListParents(opt, selec)
+
+ // if the prefix is not needed, balance it by adding the usual prefix spaces
+ if prefix == "" {
+ prefix = strings.Repeat(" ", prefixCount)
+ }
+ // indent the prefix so that the nested links are represented
+ indent := strings.Repeat(" ", previousPrefixCounts)
+ prefix = indent + prefix
+
+ content = IndentMultiLineListItem(opt, content, prefixCount+previousPrefixCounts)
+
+ return String(prefix + content + "\n")
+ },
+ },
+ {
+ Filter: []string{"#text"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ text := selec.Text()
+ if trimmed := strings.TrimSpace(text); trimmed == "" {
+ return String("")
+ }
+ text = tabR.ReplaceAllString(text, " ")
+
+ // replace multiple spaces by one space: dont accidentally make
+ // normal text be indented and thus be a code block.
+ text = multipleSpacesR.ReplaceAllString(text, " ")
+
+ if opt.EscapeMode == "basic" {
+ text = escape.MarkdownCharacters(text)
+ }
+
+ // if its inside a list, trim the spaces to not mess up the indentation
+ parent := selec.Parent()
+ next := selec.Next()
+ if IndexWithText(selec) == 0 &&
+ (parent.Is("li") || parent.Is("ol") || parent.Is("ul")) &&
+ (next.Is("ul") || next.Is("ol")) {
+ // trim only spaces and not new lines
+ text = strings.Trim(text, ` `)
+ }
+
+ return &text
+ },
+ },
+ {
+ Filter: []string{"p", "div"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ parent := goquery.NodeName(selec.Parent())
+ if IsInlineElement(parent) || parent == "li" {
+ content = "\n" + content + "\n"
+ return &content
+ }
+
+ // remove unnecessary spaces to have clean markdown
+ content = TrimpLeadingSpaces(content)
+
+ content = "\n\n" + content + "\n\n"
+ return &content
+ },
+ },
+ {
+ Filter: []string{"h1", "h2", "h3", "h4", "h5", "h6"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ if strings.TrimSpace(content) == "" {
+ return nil
+ }
+
+ content = strings.Replace(content, "\n", " ", -1)
+ content = strings.Replace(content, "\r", " ", -1)
+ content = strings.Replace(content, `#`, `\#`, -1)
+ content = strings.TrimSpace(content)
+
+ insideLink := selec.ParentsFiltered("a").Length() > 0
+ if insideLink {
+ text := opt.StrongDelimiter + content + opt.StrongDelimiter
+ text = AddSpaceIfNessesary(selec, text)
+ return &text
+ }
+
+ node := goquery.NodeName(selec)
+ level, err := strconv.Atoi(node[1:])
+ if err != nil {
+ return nil
+ }
+
+ if opt.HeadingStyle == "setext" && level < 3 {
+ line := "-"
+ if level == 1 {
+ line = "="
+ }
+
+ underline := strings.Repeat(line, len(content))
+ return String("\n\n" + content + "\n" + underline + "\n\n")
+ }
+
+ prefix := strings.Repeat("#", level)
+ text := "\n\n" + prefix + " " + content + "\n\n"
+ return &text
+ },
+ },
+ {
+ Filter: []string{"strong", "b"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ // only use one bold tag if they are nested
+ parent := selec.Parent()
+ if parent.Is("strong") || parent.Is("b") {
+ return &content
+ }
+
+ trimmed := strings.TrimSpace(content)
+ if trimmed == "" {
+ return &trimmed
+ }
+
+ // If there is a newline character between the start and end delimiter
+ // the delimiters won't be recognized. Either we remove all newline characters
+ // OR on _every_ line we put start & end delimiters.
+ trimmed = delimiterForEveryLine(trimmed, opt.StrongDelimiter)
+
+ // Always have a space to the side to recognize the delimiter
+ trimmed = AddSpaceIfNessesary(selec, trimmed)
+
+ return &trimmed
+ },
+ },
+ {
+ Filter: []string{"i", "em"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ // only use one italic tag if they are nested
+ parent := selec.Parent()
+ if parent.Is("i") || parent.Is("em") {
+ return &content
+ }
+
+ trimmed := strings.TrimSpace(content)
+ if trimmed == "" {
+ return &trimmed
+ }
+
+ // If there is a newline character between the start and end delimiter
+ // the delimiters won't be recognized. Either we remove all newline characters
+ // OR on _every_ line we put start & end delimiters.
+ trimmed = delimiterForEveryLine(trimmed, opt.EmDelimiter)
+
+ // Always have a space to the side to recognize the delimiter
+ trimmed = AddSpaceIfNessesary(selec, trimmed)
+
+ return &trimmed
+ },
+ },
+ {
+ Filter: []string{"img"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ src := selec.AttrOr("src", "")
+ src = strings.TrimSpace(src)
+ if src == "" {
+ return String("")
+ }
+
+ src = opt.GetAbsoluteURL(selec, src, opt.domain)
+
+ alt := selec.AttrOr("alt", "")
+ alt = strings.Replace(alt, "\n", " ", -1)
+
+ text := fmt.Sprintf("", alt, src)
+ return &text
+ },
+ },
+ {
+ Filter: []string{"a"},
+ AdvancedReplacement: func(content string, selec *goquery.Selection, opt *Options) (AdvancedResult, bool) {
+ // if there is no href, no link is used. So just return the content inside the link
+ href, ok := selec.Attr("href")
+ if !ok || strings.TrimSpace(href) == "" || strings.TrimSpace(href) == "#" {
+ return AdvancedResult{
+ Markdown: content,
+ }, false
+ }
+
+ href = opt.GetAbsoluteURL(selec, href, opt.domain)
+
+ // having multiline content inside a link is a bit tricky
+ content = EscapeMultiLine(content)
+
+ var title string
+ if t, ok := selec.Attr("title"); ok {
+ t = strings.Replace(t, "\n", " ", -1)
+ // escape all quotes
+ t = strings.Replace(t, `"`, `\"`, -1)
+ title = fmt.Sprintf(` "%s"`, t)
+ }
+
+ // if there is no link content (for example because it contains an svg)
+ // the 'title' or 'aria-label' attribute is used instead.
+ if strings.TrimSpace(content) == "" {
+ content = selec.AttrOr("title", selec.AttrOr("aria-label", ""))
+ }
+
+ // a link without text won't de displayed anyway
+ if content == "" {
+ return AdvancedResult{}, true
+ }
+
+ if opt.LinkStyle == "inlined" {
+ md := fmt.Sprintf("[%s](%s%s)", content, href, title)
+ md = AddSpaceIfNessesary(selec, md)
+
+ return AdvancedResult{
+ Markdown: md,
+ }, false
+ }
+
+ var replacement string
+ var reference string
+
+ switch opt.LinkReferenceStyle {
+ case "collapsed":
+
+ replacement = "[" + content + "][]"
+ reference = "[" + content + "]: " + href + title
+ case "shortcut":
+ replacement = "[" + content + "]"
+ reference = "[" + content + "]: " + href + title
+
+ default:
+ id := selec.AttrOr("data-index", "")
+ replacement = "[" + content + "][" + id + "]"
+ reference = "[" + id + "]: " + href + title
+ }
+
+ replacement = AddSpaceIfNessesary(selec, replacement)
+ return AdvancedResult{Markdown: replacement, Footer: reference}, false
+ },
+ },
+ {
+ Filter: []string{"code", "kbd", "samp", "tt"},
+ Replacement: func(_ string, selec *goquery.Selection, opt *Options) *string {
+ code := getCodeContent(selec)
+
+ // Newlines in the text aren't great, since this is inline code and not a code block.
+ // Newlines will be stripped anyway in the browser, but it won't be recognized as code
+ // from the markdown parser when there is more than one newline.
+ // So limit to
+ code = multipleNewLinesRegex.ReplaceAllString(code, "\n")
+
+ fenceChar := '`'
+ maxCount := calculateCodeFenceOccurrences(fenceChar, code)
+ maxCount++
+
+ fence := strings.Repeat(string(fenceChar), maxCount)
+
+ // code block contains a backtick as first character
+ if strings.HasPrefix(code, "`") {
+ code = " " + code
+ }
+ // code block contains a backtick as last character
+ if strings.HasSuffix(code, "`") {
+ code = code + " "
+ }
+
+ // TODO: configure delimeter in options?
+ text := fence + code + fence
+ text = AddSpaceIfNessesary(selec, text)
+ return &text
+ },
+ },
+ {
+ Filter: []string{"pre"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ codeElement := selec.Find("code")
+ language := codeElement.AttrOr("class", "")
+ language = strings.Replace(language, "language-", "", 1)
+
+ code := getCodeContent(selec)
+
+ fenceChar, _ := utf8.DecodeRuneInString(opt.Fence)
+ fence := CalculateCodeFence(fenceChar, code)
+
+ text := "\n\n" + fence + language + "\n" +
+ code +
+ "\n" + fence + "\n\n"
+ return &text
+ },
+ },
+ {
+ Filter: []string{"hr"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ // e.g. `## --- Heading` would look weird, so don't render a divider if inside a heading
+ insideHeading := selec.ParentsFiltered("h1,h2,h3,h4,h5,h6").Length() > 0
+ if insideHeading {
+ return String("")
+ }
+
+ text := "\n\n" + opt.HorizontalRule + "\n\n"
+ return &text
+ },
+ },
+ {
+ Filter: []string{"br"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ return String("\n\n")
+ },
+ },
+ {
+ Filter: []string{"blockquote"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ content = strings.TrimSpace(content)
+ if content == "" {
+ return nil
+ }
+
+ content = multipleNewLinesRegex.ReplaceAllString(content, "\n\n")
+
+ var beginningR = regexp.MustCompile(`(?m)^`)
+ content = beginningR.ReplaceAllString(content, "> ")
+
+ text := "\n\n" + content + "\n\n"
+ return &text
+ },
+ },
+ {
+ Filter: []string{"noscript"},
+ Replacement: func(content string, selec *goquery.Selection, opt *Options) *string {
+ // for now remove the contents of noscript. But in the future we could
+ // tell goquery to parse the contents of the tag.
+ // -> https://github.com/PuerkitoBio/goquery/issues/139#issuecomment-517526070
+ return nil
+ },
+ },
+}
@@ -0,0 +1,65 @@
+// Package escape escapes characters that are commonly used in
+// markdown like the * for strong/italic.
+package escape
+
+import (
+ "regexp"
+ "strings"
+)
+
// Regular expressions that find markdown constructs needing escapes.
var backslash = regexp.MustCompile(`\\(\S)`)
var heading = regexp.MustCompile(`(?m)^(#{1,6} )`)
var orderedList = regexp.MustCompile(`(?m)^(\W* {0,3})(\d+)\. `)
var unorderedList = regexp.MustCompile(`(?m)^([^\\\w]*)[*+-] `)
var horizontalDivider = regexp.MustCompile(`(?m)^([-*_] *){3,}$`)
var blockquote = regexp.MustCompile(`(?m)^(\W* {0,3})> `)
var link = regexp.MustCompile(`([\[\]])`)

// bulletPoint matches the marker of an unordered list item. It is
// compiled once at package level instead of inside the
// ReplaceAllStringFunc callback, which used to recompile it for
// every single match.
var bulletPoint = regexp.MustCompile(`([*+-])`)

var replacer = strings.NewReplacer(
	`*`, `\*`,
	`_`, `\_`,
	"`", "\\`",
	`|`, `\|`,
)

// MarkdownCharacters escapes common markdown characters so that
// `<p>**Not Bold**</p>` ends up as correct markdown `\*\*Not Strong\*\*`.
// No worry, the escaped characters will display fine, just without the formatting.
func MarkdownCharacters(text string) string {
	// Escape backslash escapes!
	text = backslash.ReplaceAllString(text, `\\$1`)

	// Escape headings
	text = heading.ReplaceAllString(text, `\$1`)

	// Escape hr: escaping the first three rule characters is enough to
	// stop the line from being parsed as a thematic break.
	text = horizontalDivider.ReplaceAllStringFunc(text, func(t string) string {
		if strings.Contains(t, "-") {
			return strings.Replace(t, "-", `\-`, 3)
		} else if strings.Contains(t, "_") {
			return strings.Replace(t, "_", `\_`, 3)
		}
		return strings.Replace(t, "*", `\*`, 3)
	})

	// Escape ol bullet points
	text = orderedList.ReplaceAllString(text, `$1$2\. `)

	// Escape ul bullet points
	text = unorderedList.ReplaceAllStringFunc(text, func(t string) string {
		return bulletPoint.ReplaceAllString(t, `\$1`)
	})

	// Escape blockquote indents
	text = blockquote.ReplaceAllString(text, `$1\> `)

	// Escape em/strong *
	// Escape em/strong _
	// Escape code _
	text = replacer.Replace(text)

	// Escape link & image brackets
	text = link.ReplaceAllString(text, `\$1`)

	return text
}
@@ -0,0 +1,464 @@
+// Package md converts html to markdown.
+//
+// converter := md.NewConverter("", true, nil)
+//
+// html = `<strong>Important</strong>`
+//
+// markdown, err := converter.ConvertString(html)
+// if err != nil {
+// log.Fatal(err)
+// }
+// fmt.Println("md ->", markdown)
+// Or if you are already using goquery:
+// markdown, err := converter.Convert(selec)
+package md
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ "net/http"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
// simpleRuleFunc is the replacement function of a basic Rule: it receives the
// already-converted content of the node and returns its markdown, or nil to
// fall through to the next rule for that tag.
type simpleRuleFunc func(content string, selec *goquery.Selection, options *Options) *string

// ruleFunc is the advanced form: it can additionally contribute text to the
// document header/footer and signal via skip that the next rule should run.
type ruleFunc func(content string, selec *goquery.Selection, options *Options) (res AdvancedResult, skip bool)

// BeforeHook runs before the converter and can be used to transform the original html
type BeforeHook func(selec *goquery.Selection)

// Afterhook runs after the converter and can be used to transform the resulting markdown.
// NOTE(review): the exported name keeps its historical casing ("Afterhook", not
// "AfterHook") for backwards compatibility.
type Afterhook func(markdown string) string
+
// Converter is initialized by NewConverter. It holds the registered rules,
// hooks and options used to convert html to markdown. Access to the maps and
// hook slices is guarded by mutex so a Converter can be shared.
type Converter struct {
	mutex  sync.RWMutex
	rules  map[string][]ruleFunc // tag name -> rules; later entries take priority
	keep   map[string]struct{}   // tags rendered back as raw html
	remove map[string]struct{}   // tags dropped from the output entirely

	before []BeforeHook
	after  []Afterhook

	domain  string
	options Options
}
+
// validate reports nil when val equals one of the possible values and a
// descriptive error otherwise.
func validate(val string, possible ...string) error {
	for i := range possible {
		if val == possible[i] {
			return nil
		}
	}
	return fmt.Errorf("field must be one of %v but got %s", possible, val)
}
+func validateOptions(opt Options) error {
+ if err := validate(opt.HeadingStyle, "setext", "atx"); err != nil {
+ return err
+ }
+ if strings.Count(opt.HorizontalRule, "*") < 3 &&
+ strings.Count(opt.HorizontalRule, "_") < 3 &&
+ strings.Count(opt.HorizontalRule, "-") < 3 {
+ return errors.New("HorizontalRule must be at least 3 characters of '*', '_' or '-' but got " + opt.HorizontalRule)
+ }
+
+ if err := validate(opt.BulletListMarker, "-", "+", "*"); err != nil {
+ return err
+ }
+ if err := validate(opt.CodeBlockStyle, "indented", "fenced"); err != nil {
+ return err
+ }
+ if err := validate(opt.Fence, "```", "~~~"); err != nil {
+ return err
+ }
+ if err := validate(opt.EmDelimiter, "_", "*"); err != nil {
+ return err
+ }
+ if err := validate(opt.StrongDelimiter, "**", "__"); err != nil {
+ return err
+ }
+ if err := validate(opt.LinkStyle, "inlined", "referenced"); err != nil {
+ return err
+ }
+ if err := validate(opt.LinkReferenceStyle, "full", "collapsed", "shortcut"); err != nil {
+ return err
+ }
+
+ return nil
+}
+
var (
	// attrListPrefix is a temporary attribute that a default before hook
	// sets on every `li`; the li rule later reads (and renders) it.
	attrListPrefix = "data-converter-list-prefix"
)
+
// NewConverter initializes a new converter and holds all the rules.
//   - `domain` is used for links and images to convert relative urls ("/image.png") to absolute urls.
//   - CommonMark is the default set of rules. Set enableCommonmark to false if you want
//     to customize everything using AddRules and DONT want to fallback to default rules.
func NewConverter(domain string, enableCommonmark bool, options *Options) *Converter {
	conv := &Converter{
		domain: domain,
		rules:  make(map[string][]ruleFunc),
		keep:   make(map[string]struct{}),
		remove: make(map[string]struct{}),
	}

	// default before hook: number every link so the `a` rule can emit
	// incremental reference ids when LinkStyle is "referenced"/"full".
	conv.before = append(conv.before, func(selec *goquery.Selection) {
		selec.Find("a[href]").Each(func(i int, s *goquery.Selection) {
			// TODO: don't hardcode "data-index" and rename it to avoid accidental conflicts
			s.SetAttr("data-index", strconv.Itoa(i+1))
		})
	})
	// default before hook: precompute each list item's prefix ("- ", "1. ", ...)
	// and stash it in an attribute for the `li` rule. The closure captures the
	// `options` variable, so the defaults applied below are visible when it runs.
	conv.before = append(conv.before, func(selec *goquery.Selection) {
		selec.Find("li").Each(func(i int, s *goquery.Selection) {
			prefix := getListPrefix(options, s)

			s.SetAttr(attrListPrefix, prefix)
		})
	})
	// default after hook: normalize whitespace in the final document.
	conv.after = append(conv.after, func(markdown string) string {
		markdown = strings.TrimSpace(markdown)
		markdown = multipleNewLinesRegex.ReplaceAllString(markdown, "\n\n")

		// remove unnecessary trailing spaces to have clean markdown
		markdown = TrimTrailingSpaces(markdown)

		return markdown
	})

	if enableCommonmark {
		conv.AddRules(commonmark...)
		conv.remove["script"] = struct{}{}
		conv.remove["style"] = struct{}{}
		conv.remove["textarea"] = struct{}{}
	}

	// TODO: put domain in options?
	if options == nil {
		options = &Options{}
	}
	// fill in a default for every option the caller left empty
	if options.HeadingStyle == "" {
		options.HeadingStyle = "atx"
	}
	if options.HorizontalRule == "" {
		options.HorizontalRule = "* * *"
	}
	if options.BulletListMarker == "" {
		options.BulletListMarker = "-"
	}
	if options.CodeBlockStyle == "" {
		options.CodeBlockStyle = "indented"
	}
	if options.Fence == "" {
		options.Fence = "```"
	}
	if options.EmDelimiter == "" {
		options.EmDelimiter = "_"
	}
	if options.StrongDelimiter == "" {
		options.StrongDelimiter = "**"
	}
	if options.LinkStyle == "" {
		options.LinkStyle = "inlined"
	}
	if options.LinkReferenceStyle == "" {
		options.LinkReferenceStyle = "full"
	}
	if options.EscapeMode == "" {
		options.EscapeMode = "basic"
	}

	// for now, store it in the options
	options.domain = domain

	if options.GetAbsoluteURL == nil {
		options.GetAbsoluteURL = DefaultGetAbsoluteURL
	}

	conv.options = *options
	err := validateOptions(conv.options)
	if err != nil {
		// invalid options are logged but not fatal: conversion continues
		// with whatever values were supplied
		log.Println("markdown options is not valid:", err)
	}

	return conv
}
+func (conv *Converter) getRuleFuncs(tag string) []ruleFunc {
+ conv.mutex.RLock()
+ defer conv.mutex.RUnlock()
+
+ r, ok := conv.rules[tag]
+ if !ok || len(r) == 0 {
+ if _, keep := conv.keep[tag]; keep {
+ return []ruleFunc{wrap(ruleKeep)}
+ }
+ if _, remove := conv.remove[tag]; remove {
+ return nil // TODO:
+ }
+
+ return []ruleFunc{wrap(ruleDefault)}
+ }
+
+ return r
+}
+
+func wrap(simple simpleRuleFunc) ruleFunc {
+ return func(content string, selec *goquery.Selection, opt *Options) (AdvancedResult, bool) {
+ res := simple(content, selec, opt)
+ if res == nil {
+ return AdvancedResult{}, true
+ }
+ return AdvancedResult{Markdown: *res}, false
+ }
+}
+
+// Before registers a hook that is run before the conversion. It
+// can be used to transform the original goquery html document.
+//
+// For example, the default before hook adds an index to every link,
+// so that the `a` tag rule (for "reference" "full") can have an incremental number.
+func (conv *Converter) Before(hooks ...BeforeHook) *Converter {
+ conv.mutex.Lock()
+ defer conv.mutex.Unlock()
+
+ for _, hook := range hooks {
+ conv.before = append(conv.before, hook)
+ }
+
+ return conv
+}
+
+// After registers a hook that is run after the conversion. It
+// can be used to transform the markdown document that is about to be returned.
+//
+// For example, the default after hook trims the returned markdown.
+func (conv *Converter) After(hooks ...Afterhook) *Converter {
+ conv.mutex.Lock()
+ defer conv.mutex.Unlock()
+
+ for _, hook := range hooks {
+ conv.after = append(conv.after, hook)
+ }
+
+ return conv
+}
+
+// ClearBefore clears the current before hooks (including the default before hooks).
+func (conv *Converter) ClearBefore() *Converter {
+ conv.mutex.Lock()
+ defer conv.mutex.Unlock()
+
+ conv.before = nil
+
+ return conv
+}
+
+// ClearAfter clears the current after hooks (including the default after hooks).
+func (conv *Converter) ClearAfter() *Converter {
+ conv.mutex.Lock()
+ defer conv.mutex.Unlock()
+
+ conv.after = nil
+
+ return conv
+}
+
+// AddRules adds the rules that are passed in to the converter.
+//
+// By default it overrides the rule for that html tag. You can
+// fall back to the default rule by returning nil.
+func (conv *Converter) AddRules(rules ...Rule) *Converter {
+ conv.mutex.Lock()
+ defer conv.mutex.Unlock()
+
+ for _, rule := range rules {
+ if len(rule.Filter) == 0 {
+ log.Println("you need to specify at least one filter for your rule")
+ }
+ for _, filter := range rule.Filter {
+ r, _ := conv.rules[filter]
+
+ if rule.AdvancedReplacement != nil {
+ r = append(r, rule.AdvancedReplacement)
+ } else {
+ r = append(r, wrap(rule.Replacement))
+ }
+ conv.rules[filter] = r
+ }
+ }
+
+ return conv
+}
+
+// Keep certain html tags in the generated output.
+func (conv *Converter) Keep(tags ...string) *Converter {
+ conv.mutex.Lock()
+ defer conv.mutex.Unlock()
+
+ for _, tag := range tags {
+ conv.keep[tag] = struct{}{}
+ }
+ return conv
+}
+
+// Remove certain html tags from the source.
+func (conv *Converter) Remove(tags ...string) *Converter {
+ conv.mutex.Lock()
+ defer conv.mutex.Unlock()
+ for _, tag := range tags {
+ conv.remove[tag] = struct{}{}
+ }
+ return conv
+}
+
// Plugin can be used to extend functionality beyond what
// is offered by commonmark. A plugin receives the converter
// and returns the additional rules it wants registered.
type Plugin func(conv *Converter) []Rule
+
+// Use can be used to add additional functionality to the converter. It is
+// used when its not sufficient to use only rules for example in Plugins.
+func (conv *Converter) Use(plugins ...Plugin) *Converter {
+ for _, plugin := range plugins {
+ rules := plugin(conv)
+ conv.AddRules(rules...) // TODO: for better performance only use one lock for all plugins
+ }
+ return conv
+}
+
// Timeout for the http client
var Timeout = time.Second * 10

// netClient is the shared client used by ConvertURL. It exists so that
// requests are bounded by Timeout instead of hanging indefinitely.
var netClient = &http.Client{
	Timeout: Timeout,
}
+
// DomainFromURL returns `u.Host` from the parsed url.
func DomainFromURL(rawURL string) string {
	trimmed := strings.TrimSpace(rawURL)

	if u, err := url.Parse(trimmed); err == nil && u.Host != "" {
		return u.Host
	}

	// retry with an explicit scheme, since urls like "example.com/page"
	// parse as path-only without one
	if u, err := url.Parse("http://" + trimmed); err == nil {
		return u.Host
	}

	return ""
}
+
// multipleNewLinesRegex matches runs of 2+ newline characters `\n`;
// used to reduce them to at most 2 new line characters.
var multipleNewLinesRegex = regexp.MustCompile(`[\n]{2,}`)
+
+// Convert returns the content from a goquery selection.
+// If you have a goquery document just pass in doc.Selection.
+func (conv *Converter) Convert(selec *goquery.Selection) string {
+ conv.mutex.RLock()
+ domain := conv.domain
+ options := conv.options
+ l := len(conv.rules)
+ if l == 0 {
+ log.Println("you have added no rules. either enable commonmark or add you own.")
+ }
+ before := conv.before
+ after := conv.after
+ conv.mutex.RUnlock()
+
+ // before hook
+ for _, hook := range before {
+ hook(selec)
+ }
+
+ res := conv.selecToMD(domain, selec, &options)
+ markdown := res.Markdown
+
+ if res.Header != "" {
+ markdown = res.Header + "\n\n" + markdown
+ }
+ if res.Footer != "" {
+ markdown += "\n\n" + res.Footer
+ }
+
+ // after hook
+ for _, hook := range after {
+ markdown = hook(markdown)
+ }
+
+ return markdown
+}
+
+// ConvertReader returns the content from a reader and returns a buffer.
+func (conv *Converter) ConvertReader(reader io.Reader) (bytes.Buffer, error) {
+ var buffer bytes.Buffer
+ doc, err := goquery.NewDocumentFromReader(reader)
+ if err != nil {
+ return buffer, err
+ }
+ buffer.WriteString(
+ conv.Convert(doc.Selection),
+ )
+
+ return buffer, nil
+}
+
// ConvertResponse returns the content from a html response.
//
// NOTE(review): goquery.NewDocumentFromResponse is deprecated upstream and
// also closes res.Body; switching to NewDocumentFromReader(res.Body) would
// change who is responsible for closing — confirm before changing.
func (conv *Converter) ConvertResponse(res *http.Response) (string, error) {
	doc, err := goquery.NewDocumentFromResponse(res)
	if err != nil {
		return "", err
	}
	return conv.Convert(doc.Selection), nil
}
+
+// ConvertString returns the content from a html string. If you
+// already have a goquery selection use `Convert`.
+func (conv *Converter) ConvertString(html string) (string, error) {
+ doc, err := goquery.NewDocumentFromReader(strings.NewReader(html))
+ if err != nil {
+ return "", err
+ }
+ return conv.Convert(doc.Selection), nil
+}
+
+// ConvertBytes returns the content from a html byte array.
+func (conv *Converter) ConvertBytes(bytes []byte) ([]byte, error) {
+ res, err := conv.ConvertString(string(bytes))
+ if err != nil {
+ return nil, err
+ }
+ return []byte(res), nil
+}
+
+// ConvertURL returns the content from the page with that url.
+func (conv *Converter) ConvertURL(url string) (string, error) {
+ // not using goquery.NewDocument directly because of the timeout
+ resp, err := netClient.Get(url)
+ if err != nil {
+ return "", err
+ }
+
+ if resp.StatusCode < 200 || resp.StatusCode > 299 {
+ return "", fmt.Errorf("expected a status code in the 2xx range but got %d", resp.StatusCode)
+ }
+
+ doc, err := goquery.NewDocumentFromResponse(resp)
+ if err != nil {
+ return "", err
+ }
+ domain := DomainFromURL(url)
+ if conv.domain != domain {
+ log.Printf("expected '%s' as the domain but got '%s' \n", conv.domain, domain)
+ }
+ return conv.Convert(doc.Selection), nil
+}
@@ -0,0 +1,212 @@
+package md
+
+import (
+ "bytes"
+ "log"
+ "net/url"
+ "regexp"
+ "strings"
+
+ "github.com/PuerkitoBio/goquery"
+ "golang.org/x/net/html"
+)
+
var (
	// ruleDefault passes the already-converted content through unchanged.
	// It is the fallback for every tag without a registered rule.
	ruleDefault = func(content string, selec *goquery.Selection, opt *Options) *string {
		return &content
	}
	// ruleKeep renders the node back to its raw html; used for tags
	// registered via Converter.Keep.
	ruleKeep = func(content string, selec *goquery.Selection, opt *Options) *string {
		element := selec.Get(0)

		var buf bytes.Buffer
		err := html.Render(&buf, element)
		if err != nil {
			log.Println("[JohannesKaufmann/html-to-markdown] ruleKeep: error while rendering the element to html:", err)
			return String("")
		}

		return String(buf.String())
	}
)
+
var inlineElements = []string{ // -> https://developer.mozilla.org/de/docs/Web/HTML/Inline_elemente
	"b", "big", "i", "small", "tt",
	"abbr", "acronym", "cite", "code", "dfn", "em", "kbd", "strong", "samp", "var",
	"a", "bdo", "br", "img", "map", "object", "q", "script", "span", "sub", "sup",
	"button", "input", "label", "select", "textarea",
}

// IsInlineElement can be used to check whether a node name (goquery.Nodename)
// is an html inline element and not a block element. Used in the rule for the
// p tag to check whether the text is inside a block element.
func IsInlineElement(e string) bool {
	for _, candidate := range inlineElements {
		if candidate == e {
			return true
		}
	}
	return false
}
+
// String is a helper function to return a pointer to the given text,
// for rules that must return a *string.
func String(text string) *string {
	s := text
	return &s
}
+
// Options to customize the output. You can change stuff like
// the character that is used for strong text. Zero-value fields are
// filled with the documented defaults by NewConverter.
type Options struct {
	// "setext" or "atx"
	// default: "atx"
	HeadingStyle string

	// Any Thematic break (must contain at least three '*', '_' or '-')
	// default: "* * *"
	HorizontalRule string

	// "-", "+", or "*"
	// default: "-"
	BulletListMarker string

	// "indented" or "fenced"
	// default: "indented"
	CodeBlockStyle string

	// ``` or ~~~
	// default: ```
	Fence string

	// _ or *
	// default: _
	EmDelimiter string

	// ** or __
	// default: **
	StrongDelimiter string

	// inlined or referenced
	// default: inlined
	LinkStyle string

	// full, collapsed, or shortcut
	// default: full
	LinkReferenceStyle string

	// basic, disabled
	// default: basic
	EscapeMode string

	// domain is set by NewConverter; kept here so rules can reach it.
	domain string

	// GetAbsoluteURL parses the `rawURL` and adds the `domain` to convert relative (/page.html)
	// urls to absolute urls (http://domain.com/page.html).
	//
	// The default is `DefaultGetAbsoluteURL`, unless you override it. That can also
	// be useful if you want to proxy the images.
	GetAbsoluteURL func(selec *goquery.Selection, rawURL string, domain string) string

	// GetCodeBlockLanguage identifies the language for syntax highlighting
	// of a code block. The default is `DefaultGetCodeBlockLanguage`, which
	// only gets the attribute x from the selection.
	//
	// You can override it if you want more results, for example by using
	// lexers.Analyse(content) from github.com/alecthomas/chroma
	// TODO: implement
	// GetCodeBlockLanguage func(s *goquery.Selection, content string) string
}
+
+// DefaultGetAbsoluteURL is the default function and can be overridden through `GetAbsoluteURL` in the options.
+func DefaultGetAbsoluteURL(selec *goquery.Selection, rawURL string, domain string) string {
+ if domain == "" {
+ return rawURL
+ }
+
+ u, err := url.Parse(rawURL)
+ if err != nil {
+ // we can't do anything with this url because it is invalid
+ return rawURL
+ }
+
+ if u.Scheme == "data" {
+ // this is a data uri (for example an inline base64 image)
+ return rawURL
+ }
+
+ if u.Scheme == "" {
+ u.Scheme = "http"
+ }
+ if u.Host == "" {
+ u.Host = domain
+ }
+
+ return u.String()
+}
+
// AdvancedResult is used for example for links. If you use LinkStyle:referenced
// the link href is placed at the bottom of the generated markdown (Footer).
type AdvancedResult struct {
	Header   string // prepended to the final document
	Markdown string // the inline markdown for the node
	Footer   string // appended to the final document (e.g. link references)
}
+
// Rule to convert certain html tags to markdown.
//
//	md.Rule{
//	  Filter: []string{"del", "s", "strike"},
//	  Replacement: func(content string, selec *goquery.Selection, opt *md.Options) *string {
//	    // You need to return a pointer to a string (md.String is just a helper function).
//	    // If you return nil the next function for that html element
//	    // will be picked. For example you could only convert an element
//	    // if it has a certain class name and fallback if not.
//	    return md.String("~" + content + "~")
//	  },
//	}
type Rule struct {
	// Filter lists the tag names (or "#text") this rule applies to.
	Filter []string
	// Replacement is the simple form; ignored if AdvancedReplacement is set.
	Replacement func(content string, selec *goquery.Selection, options *Options) *string
	// AdvancedReplacement can additionally emit header/footer text and
	// explicitly skip to the next rule.
	AdvancedReplacement func(content string, selec *goquery.Selection, options *Options) (res AdvancedResult, skip bool)
}
+
// leadingNewlinesR matches newlines at the start of a string.
var leadingNewlinesR = regexp.MustCompile(`^\n+`)

// trailingNewlinesR matches newlines at the end of a string.
var trailingNewlinesR = regexp.MustCompile(`\n+$`)

// newlinesR matches any run of newlines.
var newlinesR = regexp.MustCompile(`\n+`)

// tabR matches any run of tab characters.
var tabR = regexp.MustCompile(`\t+`)

// indentR matches every newline (multiline mode); used for indenting content.
var indentR = regexp.MustCompile(`(?m)\n`)
+
// selecToMD recursively converts the children of selec to markdown,
// depth-first. Rules for a tag are tried from the most recently added to
// the first (the loop iterates in reverse), so rules registered later
// override earlier/default ones; a rule returning skip=true falls through
// to the next candidate.
//
// NOTE(review): the recursive conversion of the children happens inside the
// rule loop, so when a rule skips, the children are converted again for the
// next rule (and their header/footer contributions are accumulated again).
func (conv *Converter) selecToMD(domain string, selec *goquery.Selection, opt *Options) AdvancedResult {
	var result AdvancedResult

	var builder strings.Builder
	selec.Contents().Each(func(i int, s *goquery.Selection) {
		name := goquery.NodeName(s)
		rules := conv.getRuleFuncs(name)

		for i := len(rules) - 1; i >= 0; i-- {
			rule := rules[i]

			// convert the children first so the rule receives their markdown
			content := conv.selecToMD(domain, s, opt)
			if content.Header != "" {
				result.Header += content.Header
			}
			if content.Footer != "" {
				result.Footer += content.Footer
			}

			res, skip := rule(content.Markdown, s, opt)
			if res.Header != "" {
				result.Header += res.Header + "\n"
			}
			if res.Footer != "" {
				result.Footer += res.Footer + "\n"
			}

			if !skip {
				builder.WriteString(res.Markdown)
				return
			}
		}
	})
	result.Markdown = builder.String()
	return result
}
@@ -0,0 +1,533 @@
+package md
+
+import (
+ "bytes"
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/PuerkitoBio/goquery"
+ "golang.org/x/net/html"
+)
+
+/*
+WARNING: The functions from this file can be used externally
+but there is no guarantee that they will stay exported.
+*/
+
+// CollectText returns the text of the node and all its children
+func CollectText(n *html.Node) string {
+ text := &bytes.Buffer{}
+ collectText(n, text)
+ return text.String()
+}
+// collectText recursively appends the data of every text node under n to buf.
+func collectText(n *html.Node, buf *bytes.Buffer) {
+	if n.Type == html.TextNode {
+		buf.WriteString(n.Data)
+	}
+	for c := n.FirstChild; c != nil; c = c.NextSibling {
+		collectText(c, buf)
+	}
+}
+
+// getName returns the node name (e.g. the tag name) of a single html node by
+// wrapping it in a one-element goquery selection.
+func getName(node *html.Node) string {
+	selec := &goquery.Selection{Nodes: []*html.Node{node}}
+	return goquery.NodeName(selec)
+}
+
+// isTrimmedElement reports whether the element automatically trims its own
+// content. Neighbors don't add another space around such elements, since the
+// element already takes care of its surrounding whitespace.
+func isTrimmedElement(name string) bool {
+	switch name {
+	case "a",
+		"strong", "b",
+		"i", "em",
+		"del", "s", "strike",
+		"code":
+		return true
+	}
+	return false
+}
+
+// getPrevNodeText walks the previous siblings, starting at node, and returns
+// the text of the first one that has visible content. A <br> yields "\n".
+// The boolean reports whether any usable sibling was found.
+func getPrevNodeText(node *html.Node) (string, bool) {
+	if node == nil {
+		return "", false
+	}
+
+	for ; node != nil; node = node.PrevSibling {
+		text := CollectText(node)
+
+		name := getName(node)
+		if name == "br" {
+			return "\n", true
+		}
+
+		// if the content is empty, try our luck with the previous node
+		if strings.TrimSpace(text) == "" {
+			continue
+		}
+
+		// trimmed elements normalize their own surrounding whitespace,
+		// so compare against their trimmed text
+		if isTrimmedElement(name) {
+			text = strings.TrimSpace(text)
+		}
+
+		return text, true
+	}
+	return "", false
+}
+// getNextNodeText walks the next siblings, starting at node, and returns the
+// text of the first one that has visible content. A <br> yields "\n". For
+// elements that trim their own content a single space is returned instead of
+// the text, since those elements add their own surrounding space.
+func getNextNodeText(node *html.Node) (string, bool) {
+	if node == nil {
+		return "", false
+	}
+
+	for ; node != nil; node = node.NextSibling {
+		text := CollectText(node)
+
+		name := getName(node)
+		if name == "br" {
+			return "\n", true
+		}
+
+		// if the content is empty, try our luck with the next node
+		if strings.TrimSpace(text) == "" {
+			continue
+		}
+
+		// if you have "a a a", three elements that are trimmed, then only add
+		// a space to one side, since the other's are also adding a space.
+		if isTrimmedElement(name) {
+			text = " "
+		}
+
+		return text, true
+	}
+	return "", false
+}
+
+// AddSpaceIfNessesary adds spaces to the text based on the neighbors.
+// That makes sure that there is always a space to the side, to recognize the delimiter.
+// (Note: the misspelled name is kept for API compatibility.)
+func AddSpaceIfNessesary(selec *goquery.Selection, markdown string) string {
+	if len(selec.Nodes) == 0 {
+		return markdown
+	}
+	rootNode := selec.Nodes[0]
+
+	// prepend a space when the preceding sibling's text doesn't already
+	// end in whitespace
+	prev, hasPrev := getPrevNodeText(rootNode.PrevSibling)
+	if hasPrev {
+		lastChar, size := utf8.DecodeLastRuneInString(prev)
+		if size > 0 && !unicode.IsSpace(lastChar) {
+			markdown = " " + markdown
+		}
+	}
+
+	// append a space unless the following text starts with whitespace or
+	// punctuation (e.g. a comma directly after a link)
+	next, hasNext := getNextNodeText(rootNode.NextSibling)
+	if hasNext {
+		firstChar, size := utf8.DecodeRuneInString(next)
+		if size > 0 && !unicode.IsSpace(firstChar) && !unicode.IsPunct(firstChar) {
+			markdown = markdown + " "
+		}
+	}
+
+	return markdown
+}
+
+// isLineCodeDelimiter reports whether the line opens or closes a fenced
+// code block, i.e. whether its first three characters are backticks.
+func isLineCodeDelimiter(chars []rune) bool {
+	// TODO: If it starts with 4 (instead of 3) fence characters, we should only end it
+	// if we see the same amount of ending fence characters.
+	if len(chars) < 3 {
+		return false
+	}
+	for _, c := range chars[:3] {
+		if c != '`' {
+			return false
+		}
+	}
+	return true
+}
+
+// TrimpLeadingSpaces removes spaces from the beginning of a line
+// but makes sure that list items and code blocks are not affected.
+//
+// Lines inside ``` fences are left untouched, as are lines indented by
+// four or more columns (an indented code block; a tab counts as four
+// columns) and lines starting a "-" list item.
+// (Note: the misspelled name is kept for API compatibility.)
+func TrimpLeadingSpaces(text string) string {
+	var insideCodeBlock bool
+
+	lines := strings.Split(text, "\n")
+	for index := range lines {
+		// a line starting with three backticks toggles the fenced-code state
+		// TODO: If it starts with 4 (instead of 3) fence characters, we should only end it
+		// if we see the same amount of ending fence characters.
+		if strings.HasPrefix(lines[index], "```") {
+			insideCodeBlock = !insideCodeBlock
+		}
+		if insideCodeBlock {
+			// We are inside a code block and don't want to
+			// disturb that formatting (e.g. python indentation)
+			continue
+		}
+
+		chars := []rune(lines[index])
+
+		var spaces int
+		for i := 0; i < len(chars); i++ {
+			if unicode.IsSpace(chars[i]) {
+				if chars[i] == '\t' {
+					// fix: a tab is worth four columns; this previously
+					// checked for ' ', which made every single space count
+					// as a full indent level and kept space-indented lines
+					spaces = spaces + 4
+				} else {
+					spaces++
+				}
+				continue
+			}
+
+			// this seems to be a list item
+			if chars[i] == '-' {
+				break
+			}
+
+			// this seems to be a code block
+			if spaces >= 4 {
+				break
+			}
+
+			// remove the space characters from the string
+			chars = chars[i:]
+			break
+		}
+		lines[index] = string(chars)
+	}
+
+	return strings.Join(lines, "\n")
+}
+
+// TrimTrailingSpaces removes unnecessary whitespace from the end of every line.
+func TrimTrailingSpaces(text string) string {
+	lines := strings.Split(text, "\n")
+	for i, line := range lines {
+		lines[i] = strings.TrimRightFunc(line, unicode.IsSpace)
+	}
+
+	return strings.Join(lines, "\n")
+}
+
+// multipleNewLinesInLinkRegex matches runs of escaped newlines ("\n\")
+// so they can be collapsed into a single one. It is the link-specific
+// counterpart of `multipleNewLinesRegex`.
+var multipleNewLinesInLinkRegex = regexp.MustCompile(`(\n\\){1,}`) // `([\n\r\s]\\)`
+
+// EscapeMultiLine deals with multiline content inside a link: the content is
+// trimmed, every newline is escaped with a backslash, and consecutive escaped
+// newlines are collapsed so the link stays on one logical line.
+func EscapeMultiLine(content string) string {
+	content = strings.TrimSpace(content)
+	// strings.ReplaceAll replaces the deprecated-style Replace(..., -1)
+	content = strings.ReplaceAll(content, "\n", "\\\n")
+
+	content = multipleNewLinesInLinkRegex.ReplaceAllString(content, "\n\\")
+
+	return content
+}
+
+// calculateCodeFenceOccurrences returns the length of the longest run of
+// consecutive fenceChar characters inside content (0 if there is none).
+func calculateCodeFenceOccurrences(fenceChar rune, content string) int {
+	var runs []int
+
+	run := 0
+	for _, c := range content {
+		if c == fenceChar {
+			// extend the current run of fence characters
+			run++
+			continue
+		}
+		if run > 0 {
+			runs = append(runs, run)
+			run = 0
+		}
+	}
+
+	// the content may end in the middle of a run
+	if run > 0 {
+		runs = append(runs, run)
+	}
+
+	return findMax(runs)
+}
+
+// CalculateCodeFence can be passed the content of a code block and it returns
+// how many fence characters (` or ~) should be used.
+//
+// This is useful if the html content itself contains fence characters,
+// for example ``` -> https://stackoverflow.com/a/49268657
+func CalculateCodeFence(fenceChar rune, content string) string {
+	// the fence must always be one character longer than any run of fence
+	// characters inside the content...
+	size := calculateCodeFenceOccurrences(fenceChar, content) + 1
+
+	// ...and at least three characters long to be recognized as a code block
+	if size < 3 {
+		size = 3
+	}
+
+	return strings.Repeat(string(fenceChar), size)
+}
+
+// findMax returns the largest value in a, or 0 for an empty slice.
+func findMax(a []int) int {
+	if len(a) == 0 {
+		return 0
+	}
+	best := a[0]
+	for _, value := range a[1:] {
+		if value > best {
+			best = value
+		}
+	}
+	return best
+}
+
+// getCodeWithoutTags returns the raw text under startNode, dropping
+// style/script/textarea subtrees entirely and writing a newline for every
+// <br> and <div> so the line structure of the code is kept.
+func getCodeWithoutTags(startNode *html.Node) []byte {
+	var buf bytes.Buffer
+
+	var f func(*html.Node)
+	f = func(n *html.Node) {
+		// skip subtrees whose text is not part of the code
+		if n.Type == html.ElementNode && (n.Data == "style" || n.Data == "script" || n.Data == "textarea") {
+			return
+		}
+		// block/line-break elements become newlines
+		if n.Type == html.ElementNode && (n.Data == "br" || n.Data == "div") {
+			buf.WriteString("\n")
+		}
+
+		if n.Type == html.TextNode {
+			buf.WriteString(n.Data)
+			return
+		}
+
+		for c := n.FirstChild; c != nil; c = c.NextSibling {
+			f(c)
+		}
+	}
+
+	f(startNode)
+
+	return buf.Bytes()
+}
+
+// getCodeContent gets the text content of a pre/code selection, with
+// style/script/textarea content stripped and <br>/<div> boundaries turned
+// into newlines (see getCodeWithoutTags).
+// Returns "" if the selection is empty.
+func getCodeContent(selec *goquery.Selection) string {
+	if len(selec.Nodes) == 0 {
+		return ""
+	}
+
+	code := getCodeWithoutTags(selec.Nodes[0])
+
+	return string(code)
+}
+
+// delimiterForEveryLine puts the delimiter not just at the start and end of
+// the string but on every non-empty line, because bold/italic delimiters are
+// not recognized across newline characters. Each line is trimmed before the
+// delimiters are applied; empty lines are left alone.
+func delimiterForEveryLine(text string, delimiter string) string {
+	lines := strings.Split(text, "\n")
+
+	for i := range lines {
+		trimmed := strings.TrimSpace(lines[i])
+		if trimmed == "" {
+			// Skip empty lines
+			continue
+		}
+		lines[i] = delimiter + trimmed + delimiter
+	}
+	return strings.Join(lines, "\n")
+}
+
+// isWrapperListItem returns whether the list item has own
+// content or is just a wrapper for another list.
+// e.g. "<li><ul>..."
+func isWrapperListItem(s *goquery.Selection) bool {
+	// text that belongs to the <li> itself, excluding nested lists
+	directText := s.Contents().Not("ul").Not("ol").Text()
+
+	noOwnText := strings.TrimSpace(directText) == ""
+	childIsList := s.ChildrenFiltered("ul").Length() > 0 || s.ChildrenFiltered("ol").Length() > 0
+
+	return noOwnText && childIsList
+}
+
+// getListStart returns the integer from which the counting
+// for the list items should start, taken from the "start" attribute.
+// -> https://developer.mozilla.org/en-US/docs/Web/HTML/Element/ol#start
+// Missing, unparsable or negative values fall back to 1.
+func getListStart(parent *goquery.Selection) int {
+	attr := parent.AttrOr("start", "")
+	if attr == "" {
+		return 1
+	}
+
+	start, err := strconv.Atoi(attr)
+	if err != nil || start < 0 {
+		return 1
+	}
+	return start
+}
+
+// getListPrefix returns the appropriate prefix for the list item.
+// For example "- ", "* ", "1. ", "01. ", ...
+func getListPrefix(opt *Options, s *goquery.Selection) string {
+	// wrapper items ("<li><ul>...") get no prefix of their own
+	if isWrapperListItem(s) {
+		return ""
+	}
+
+	parent := s.Parent()
+	if parent.Is("ul") {
+		return opt.BulletListMarker + " "
+	} else if parent.Is("ol") {
+		// honor a custom "start" attribute on the <ol>
+		start := getListStart(parent)
+		currentIndex := start + s.Index()
+
+		lastIndex := parent.Children().Last().Index() + 1
+		maxLength := len(strconv.Itoa(lastIndex))
+
+		// pad the numbers so that all prefix numbers in the list take up the same space
+		// `%02d.` -> "01. "
+		format := `%0` + strconv.Itoa(maxLength) + `d. `
+		return fmt.Sprintf(format, currentIndex)
+	}
+	// If the HTML is malformed and the list element isn't in a ul or ol, return no prefix
+	return ""
+}
+
+// countListParents counts how much space is reserved for the prefixes at all the parent lists.
+// This is useful to calculate the correct level of indentation for nested lists.
+// It returns the prefix width of the closest enclosing list and the summed
+// widths of all lists further up the tree.
+func countListParents(opt *Options, selec *goquery.Selection) (int, int) {
+	var values []int
+	for n := selec.Parent(); n != nil; n = n.Parent() {
+		// <li> wrappers sit between nested lists; look through them
+		if n.Is("li") {
+			continue
+		}
+		if !n.Is("ul") && !n.Is("ol") {
+			break
+		}
+
+		// the rendered prefix of this list level is stored as an attribute
+		// on its first child (see attrListPrefix)
+		prefix := n.Children().First().AttrOr(attrListPrefix, "")
+
+		values = append(values, len(prefix))
+	}
+
+	// how many spaces are reserved for the prefixes of my siblings
+	var prefixCount int
+
+	// how many spaces are reserved in total for all of the other
+	// list parents up the tree
+	var previousPrefixCounts int
+
+	for i, val := range values {
+		if i == 0 {
+			prefixCount = val
+			continue
+		}
+
+		previousPrefixCounts += val
+	}
+
+	return prefixCount, previousPrefixCounts
+}
+
+// IndentMultiLineListItem makes sure that multiline list items
+// are properly indented. Every line after the first is prefixed with
+// the given number of spaces; as soon as a nested list item is
+// encountered the remaining lines are returned unchanged.
+func IndentMultiLineListItem(opt *Options, text string, spaces int) string {
+	parts := strings.Split(text, "\n")
+	for i := range parts {
+		// dont touch the first line since its indented through the prefix
+		if i == 0 {
+			continue
+		}
+
+		if isListItem(opt, parts[i]) {
+			return strings.Join(parts, "\n")
+		}
+
+		indent := strings.Repeat(" ", spaces)
+		parts[i] = indent + parts[i]
+	}
+
+	return strings.Join(parts, "\n")
+}
+
+// isListItem checks whether the line is a markdown list item: optional
+// leading whitespace, then either the bullet marker or a number followed by
+// a dot, then whitespace, then some text. Two bullet markers in a row are
+// rejected because that is more likely a horizontal rule.
+func isListItem(opt *Options, line string) bool {
+	b := []rune(line)
+
+	bulletMarker := []rune(opt.BulletListMarker)[0]
+
+	// state flags of the small scanner below
+	var hasNumber bool
+	var hasMarker bool
+	var hasSpace bool
+
+	for i := 0; i < len(b); i++ {
+		// A marker followed by a space qualifies as a list item
+		if hasMarker && hasSpace {
+			if b[i] == bulletMarker {
+				// But if another BulletListMarker is found, it
+				// might be a HorizontalRule
+				return false
+			}
+
+			if !unicode.IsSpace(b[i]) {
+				// Now we have some text
+				return true
+			}
+		}
+
+		if hasMarker {
+			if unicode.IsSpace(b[i]) {
+				hasSpace = true
+				continue
+			}
+			// A marker like "1." that is not immediately followed by a space
+			// is probably a false positive
+			return false
+		}
+
+		if b[i] == bulletMarker {
+			hasMarker = true
+			continue
+		}
+
+		if hasNumber && b[i] == '.' {
+			hasMarker = true
+			continue
+		}
+		if unicode.IsDigit(b[i]) {
+			hasNumber = true
+			continue
+		}
+
+		if unicode.IsSpace(b[i]) {
+			continue
+		}
+
+		// If we encounter any other character
+		// before finding an indicator, it's
+		// not a list item
+		return false
+	}
+	return false
+}
+
+// IndexWithText is similar to goquery's Index function but
+// returns the index of the current element while
+// NOT counting the empty elements beforehand.
+func IndexWithText(s *goquery.Selection) int {
+	// count only the preceding siblings that contain visible text
+	return s.PrevAll().FilterFunction(func(i int, s *goquery.Selection) bool {
+		return strings.TrimSpace(s.Text()) != ""
+	}).Length()
+}
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014-2019 TSUYUSATO Kitsune
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
@@ -0,0 +1,52 @@
+# heredoc
+
+[](https://circleci.com/gh/MakeNowJust/heredoc) [](https://godoc.org/github.com/MakeNowJust/heredoc)
+
+## About
+
+Package heredoc provides here-documents while keeping relative indentation.
+
+## Install
+
+```console
+$ go get github.com/MakeNowJust/heredoc
+```
+
+## Import
+
+```go
+// usual
+import "github.com/MakeNowJust/heredoc"
+```
+
+## Example
+
+```go
+package main
+
+import (
+ "fmt"
+ "github.com/MakeNowJust/heredoc"
+)
+
+func main() {
+ fmt.Println(heredoc.Doc(`
+ Lorem ipsum dolor sit amet, consectetur adipisicing elit,
+ sed do eiusmod tempor incididunt ut labore et dolore magna
+ aliqua. Ut enim ad minim veniam, ...
+ `))
+ // Output:
+ // Lorem ipsum dolor sit amet, consectetur adipisicing elit,
+ // sed do eiusmod tempor incididunt ut labore et dolore magna
+ // aliqua. Ut enim ad minim veniam, ...
+ //
+}
+```
+
+## API Document
+
+ - [heredoc - GoDoc](https://godoc.org/github.com/MakeNowJust/heredoc)
+
+## License
+
+This software is released under the MIT License, see LICENSE.
@@ -0,0 +1,105 @@
+// Copyright (c) 2014-2019 TSUYUSATO Kitsune
+// This software is released under the MIT License.
+// http://opensource.org/licenses/mit-license.php
+
+// Package heredoc provides creation of here-documents from raw strings.
+//
+// Golang supports raw-string syntax.
+//
+// doc := `
+// Foo
+// Bar
+// `
+//
+// But raw-string cannot recognize indentation. Thus such content is an indented string, equivalent to
+//
+// "\n\tFoo\n\tBar\n"
+//
+// I don't want this!
+//
+// However this problem is solved by package heredoc.
+//
+// doc := heredoc.Doc(`
+// Foo
+// Bar
+// `)
+//
+// Is equivalent to
+//
+// "Foo\nBar\n"
+package heredoc
+
+import (
+ "fmt"
+ "strings"
+ "unicode"
+)
+
+const maxInt = int(^uint(0) >> 1)
+
+// Doc returns un-indented string as here-document.
+//
+// The minimum indentation over all non-empty lines is determined and
+// stripped from every line.
+func Doc(raw string) string {
+	skipFirstLine := false
+	if len(raw) > 0 && raw[0] == '\n' {
+		// drop the newline directly after the opening backquote
+		raw = raw[1:]
+	} else {
+		// content starts on the first line, which carries no indentation
+		skipFirstLine = true
+	}
+
+	lines := strings.Split(raw, "\n")
+
+	minIndentSize := getMinIndent(lines, skipFirstLine)
+	lines = removeIndentation(lines, minIndentSize, skipFirstLine)
+
+	return strings.Join(lines, "\n")
+}
+
+// getMinIndent calculates the minimum indentation in lines, excluding empty lines.
+// Every leading whitespace rune counts as one column. As a side effect, a
+// whitespace-only last line whose indent is below the running minimum is
+// blanked in place so it carries no stray whitespace after un-indenting.
+func getMinIndent(lines []string, skipFirstLine bool) int {
+	minIndentSize := maxInt
+
+	for i, line := range lines {
+		if i == 0 && skipFirstLine {
+			continue
+		}
+
+		// measure this line's leading whitespace
+		indentSize := 0
+		for _, r := range []rune(line) {
+			if unicode.IsSpace(r) {
+				indentSize += 1
+			} else {
+				break
+			}
+		}
+
+		if len(line) == indentSize {
+			// whitespace-only line: it does not participate in the minimum
+			if i == len(lines)-1 && indentSize < minIndentSize {
+				lines[i] = ""
+			}
+		} else if indentSize < minIndentSize {
+			minIndentSize = indentSize
+		}
+	}
+	return minIndentSize
+}
+
+// removeIndentation removes n characters from the front of each line in
+// lines, skipping the first line when skipFirstLine is true. Lines shorter
+// than n are left untouched. The slice is modified and returned.
+func removeIndentation(lines []string, n int, skipFirstLine bool) []string {
+	for i := range lines {
+		if skipFirstLine && i == 0 {
+			continue
+		}
+
+		if len(lines[i]) >= n {
+			lines[i] = lines[i][n:]
+		}
+	}
+	return lines
+}
+
+// Docf returns unindented and formatted string as here-document.
+// Formatting is done as for fmt.Printf().
+func Docf(raw string, args ...interface{}) string {
+	// un-indent first so format verbs keep their position in the template
+	return fmt.Sprintf(Doc(raw), args...)
+}
@@ -0,0 +1 @@
+testdata/* linguist-vendored
@@ -0,0 +1,16 @@
+# editor temporary files
+*.sublime-*
+.DS_Store
+*.swp
+#*.*#
+tags
+
+# direnv config
+.env*
+
+# test binaries
+*.test
+
+# coverage and profile outputs
+*.out
+
@@ -0,0 +1,12 @@
+Copyright (c) 2012-2021, Martin Angers & Contributors
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+
+* Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@@ -0,0 +1,202 @@
+# goquery - a little like that j-thing, only in Go
+
+[](https://github.com/PuerkitoBio/goquery/actions)
+[](https://pkg.go.dev/github.com/PuerkitoBio/goquery)
+[](https://sourcegraph.com/github.com/PuerkitoBio/goquery?badge)
+
+goquery brings a syntax and a set of features similar to [jQuery][] to the [Go language][go]. It is based on Go's [net/html package][html] and the CSS Selector library [cascadia][]. Since the net/html parser returns nodes, and not a full-featured DOM tree, jQuery's stateful manipulation functions (like height(), css(), detach()) have been left off.
+
+Also, because the net/html parser requires UTF-8 encoding, so does goquery: it is the caller's responsibility to ensure that the source document provides UTF-8 encoded HTML. See the [wiki][] for various options to do this.
+
+Syntax-wise, it is as close as possible to jQuery, with the same function names when possible, and that warm and fuzzy chainable interface. jQuery being the ultra-popular library that it is, I felt that writing a similar HTML-manipulating library was better to follow its API than to start anew (in the same spirit as Go's `fmt` package), even though some of its methods are less than intuitive (looking at you, [index()][index]...).
+
+## Table of Contents
+
+* [Installation](#installation)
+* [Changelog](#changelog)
+* [API](#api)
+* [Examples](#examples)
+* [Related Projects](#related-projects)
+* [Support](#support)
+* [License](#license)
+
+## Installation
+
+Please note that starting with version `v1.9.0` of goquery, Go 1.18+ is required due to the use of generics. For previous goquery versions, a Go version of 1.1+ was required because of the `net/html` dependency. Ongoing goquery development is tested on the latest 2 versions of Go.
+
+ $ go get github.com/PuerkitoBio/goquery
+
+(optional) To run unit tests:
+
+ $ cd $GOPATH/src/github.com/PuerkitoBio/goquery
+ $ go test
+
+(optional) To run benchmarks (warning: it runs for a few minutes):
+
+ $ cd $GOPATH/src/github.com/PuerkitoBio/goquery
+ $ go test -bench=".*"
+
+## Changelog
+
+**Note that goquery's API is now stable, and will not break.**
+
+* **2024-04-29 (v1.9.2)** : Update `go.mod` dependencies.
+* **2024-02-29 (v1.9.1)** : Improve allocation and performance of the `Map` function and `Selection.Map` method, better document the cascadia differences (thanks [@jwilsson](https://github.com/jwilsson)).
+* **2024-02-22 (v1.9.0)** : Add a generic `Map` function, **goquery now requires Go version 1.18+** (thanks [@Fesaa](https://github.com/Fesaa)).
+* **2023-02-18 (v1.8.1)** : Update `go.mod` dependencies, update CI workflow.
+* **2021-10-25 (v1.8.0)** : Add `Render` function to render a `Selection` to an `io.Writer` (thanks [@anthonygedeon](https://github.com/anthonygedeon)).
+* **2021-07-11 (v1.7.1)** : Update go.mod dependencies and add dependabot config (thanks [@jauderho](https://github.com/jauderho)).
+* **2021-06-14 (v1.7.0)** : Add `Single` and `SingleMatcher` functions to optimize first-match selection (thanks [@gdollardollar](https://github.com/gdollardollar)).
+* **2021-01-11 (v1.6.1)** : Fix panic when calling `{Prepend,Append,Set}Html` on a `Selection` that contains non-Element nodes.
+* **2020-10-08 (v1.6.0)** : Parse html in context of the container node for all functions that deal with html strings (`AfterHtml`, `AppendHtml`, etc.). Thanks to [@thiemok][thiemok] and [@davidjwilkins][djw] for their work on this.
+* **2020-02-04 (v1.5.1)** : Update module dependencies.
+* **2018-11-15 (v1.5.0)** : Go module support (thanks @Zaba505).
+* **2018-06-07 (v1.4.1)** : Add `NewDocumentFromReader` examples.
+* **2018-03-24 (v1.4.0)** : Deprecate `NewDocument(url)` and `NewDocumentFromResponse(response)`.
+* **2018-01-28 (v1.3.0)** : Add `ToEnd` constant to `Slice` until the end of the selection (thanks to @davidjwilkins for raising the issue).
+* **2018-01-11 (v1.2.0)** : Add `AddBack*` and deprecate `AndSelf` (thanks to @davidjwilkins).
+* **2017-02-12 (v1.1.0)** : Add `SetHtml` and `SetText` (thanks to @glebtv).
+* **2016-12-29 (v1.0.2)** : Optimize allocations for `Selection.Text` (thanks to @radovskyb).
+* **2016-08-28 (v1.0.1)** : Optimize performance for large documents.
+* **2016-07-27 (v1.0.0)** : Tag version 1.0.0.
+* **2016-06-15** : Invalid selector strings internally compile to a `Matcher` implementation that never matches any node (instead of a panic). So for example, `doc.Find("~")` returns an empty `*Selection` object.
+* **2016-02-02** : Add `NodeName` utility function similar to the DOM's `nodeName` property. It returns the tag name of the first element in a selection, and other relevant values of non-element nodes (see [doc][] for details). Add `OuterHtml` utility function similar to the DOM's `outerHTML` property (named `OuterHtml` in small caps for consistency with the existing `Html` method on the `Selection`).
+* **2015-04-20** : Add `AttrOr` helper method to return the attribute's value or a default value if absent. Thanks to [piotrkowalczuk][piotr].
+* **2015-02-04** : Add more manipulation functions - Prepend* - thanks again to [Andrew Stone][thatguystone].
+* **2014-11-28** : Add more manipulation functions - ReplaceWith*, Wrap* and Unwrap - thanks again to [Andrew Stone][thatguystone].
+* **2014-11-07** : Add manipulation functions (thanks to [Andrew Stone][thatguystone]) and `*Matcher` functions, that receive compiled cascadia selectors instead of selector strings, thus avoiding potential panics thrown by goquery via `cascadia.MustCompile` calls. This results in better performance (selectors can be compiled once and reused) and more idiomatic error handling (you can handle cascadia's compilation errors, instead of recovering from panics, which had been bugging me for a long time). Note that the actual type expected is a `Matcher` interface, that `cascadia.Selector` implements. Other matcher implementations could be used.
+* **2014-11-06** : Change import paths of net/html to golang.org/x/net/html (see https://groups.google.com/forum/#!topic/golang-nuts/eD8dh3T9yyA). Make sure to update your code to use the new import path too when you call goquery with `html.Node`s.
+* **v0.3.2** : Add `NewDocumentFromReader()` (thanks jweir) which allows creating a goquery document from an io.Reader.
+* **v0.3.1** : Add `NewDocumentFromResponse()` (thanks assassingj) which allows creating a goquery document from an http response.
+* **v0.3.0** : Add `EachWithBreak()` which allows to break out of an `Each()` loop by returning false. This function was added instead of changing the existing `Each()` to avoid breaking compatibility.
+* **v0.2.1** : Make go-getable, now that [go.net/html is Go1.0-compatible][gonet] (thanks to @matrixik for pointing this out).
+* **v0.2.0** : Add support for negative indices in Slice(). **BREAKING CHANGE** `Document.Root` is removed, `Document` is now a `Selection` itself (a selection of one, the root element, just like `Document.Root` was before). Add jQuery's Closest() method.
+* **v0.1.1** : Add benchmarks to use as baseline for refactorings, refactor Next...() and Prev...() methods to use the new html package's linked list features (Next/PrevSibling, FirstChild). Good performance boost (40+% in some cases).
+* **v0.1.0** : Initial release.
+
+## API
+
+goquery exposes two structs, `Document` and `Selection`, and the `Matcher` interface. Unlike jQuery, which is loaded as part of a DOM document, and thus acts on its containing document, goquery doesn't know which HTML document to act upon. So it needs to be told, and that's what the `Document` type is for. It holds the root document node as the initial Selection value to manipulate.
+
+jQuery often has many variants for the same function (no argument, a selector string argument, a jQuery object argument, a DOM element argument, ...). Instead of exposing the same features in goquery as a single method with variadic empty interface arguments, statically-typed signatures are used following this naming convention:
+
+* When the jQuery equivalent can be called with no argument, it has the same name as jQuery for the no argument signature (e.g.: `Prev()`), and the version with a selector string argument is called `XxxFiltered()` (e.g.: `PrevFiltered()`)
+* When the jQuery equivalent **requires** one argument, the same name as jQuery is used for the selector string version (e.g.: `Is()`)
+* The signatures accepting a jQuery object as argument are defined in goquery as `XxxSelection()` and take a `*Selection` object as argument (e.g.: `FilterSelection()`)
+* The signatures accepting a DOM element as argument in jQuery are defined in goquery as `XxxNodes()` and take a variadic argument of type `*html.Node` (e.g.: `FilterNodes()`)
+* The signatures accepting a function as argument in jQuery are defined in goquery as `XxxFunction()` and take a function as argument (e.g.: `FilterFunction()`)
+* The goquery methods that can be called with a selector string have a corresponding version that take a `Matcher` interface and are defined as `XxxMatcher()` (e.g.: `IsMatcher()`)
+
+Utility functions that are not in jQuery but are useful in Go are implemented as functions (that take a `*Selection` as parameter), to avoid a potential naming clash on the `*Selection`'s methods (reserved for jQuery-equivalent behaviour).
+
+The complete [package reference documentation can be found here][doc].
+
+Please note that Cascadia's selectors do not necessarily match all supported selectors of jQuery (Sizzle). See the [cascadia project][cascadia] for details. Also, the selectors work more like the DOM's `querySelectorAll`, than jQuery's matchers - they have no concept of contextual matching (for some concrete examples of what that means, see [this ticket](https://github.com/andybalholm/cascadia/issues/61)). In practice, it doesn't matter very often but it's something worth mentioning. Invalid selector strings compile to a `Matcher` that fails to match any node. Behaviour of the various functions that take a selector string as argument follows from that fact, e.g. (where `~` is an invalid selector string):
+
+* `Find("~")` returns an empty selection because the selector string doesn't match anything.
+* `Add("~")` returns a new selection that holds the same nodes as the original selection, because it didn't add any node (selector string didn't match anything).
+* `ParentsFiltered("~")` returns an empty selection because the selector string doesn't match anything.
+* `ParentsUntil("~")` returns all parents of the selection because the selector string didn't match any element to stop before the top element.
+
+## Examples
+
+See some tips and tricks in the [wiki][].
+
+Adapted from example_test.go:
+
+```Go
+package main
+
+import (
+ "fmt"
+ "log"
+ "net/http"
+
+ "github.com/PuerkitoBio/goquery"
+)
+
+func ExampleScrape() {
+ // Request the HTML page.
+ res, err := http.Get("http://metalsucks.net")
+ if err != nil {
+ log.Fatal(err)
+ }
+ defer res.Body.Close()
+ if res.StatusCode != 200 {
+ log.Fatalf("status code error: %d %s", res.StatusCode, res.Status)
+ }
+
+ // Load the HTML document
+ doc, err := goquery.NewDocumentFromReader(res.Body)
+ if err != nil {
+ log.Fatal(err)
+ }
+
+ // Find the review items
+ doc.Find(".left-content article .post-title").Each(func(i int, s *goquery.Selection) {
+ // For each item found, get the title
+ title := s.Find("a").Text()
+ fmt.Printf("Review %d: %s\n", i, title)
+ })
+}
+
+func main() {
+ ExampleScrape()
+}
+```
+
+## Related Projects
+
+- [Goq][goq], an HTML deserialization and scraping library based on goquery and struct tags.
+- [andybalholm/cascadia][cascadia], the CSS selector library used by goquery.
+- [suntong/cascadia][cascadiacli], a command-line interface to the cascadia CSS selector library, useful to test selectors.
+- [gocolly/colly](https://github.com/gocolly/colly), a lightning fast and elegant Scraping Framework
+- [gnulnx/goperf](https://github.com/gnulnx/goperf), a website performance test tool that also fetches static assets.
+- [MontFerret/ferret](https://github.com/MontFerret/ferret), declarative web scraping.
+- [tacusci/berrycms](https://github.com/tacusci/berrycms), a modern simple to use CMS with easy to write plugins
+- [Dataflow kit](https://github.com/slotix/dataflowkit), Web Scraping framework for Gophers.
+- [Geziyor](https://github.com/geziyor/geziyor), a fast web crawling & scraping framework for Go. Supports JS rendering.
+- [Pagser](https://github.com/foolin/pagser), a simple, easy, extensible, configurable HTML parser to struct based on goquery and struct tags.
+- [stitcherd](https://github.com/vhodges/stitcherd), A server for doing server side includes using css selectors and DOM updates.
+- [goskyr](https://github.com/jakopako/goskyr), an easily configurable command-line scraper written in Go.
+- [goGetJS](https://github.com/davemolk/goGetJS), a tool for extracting, searching, and saving JavaScript files (with optional headless browser).
+- [fitter](https://github.com/PxyUp/fitter), a tool for selecting values from JSON, XML, HTML and XPath formatted pages.
+
+## Support
+
+There are a number of ways you can support the project:
+
+* Use it, star it, build something with it, spread the word!
+ - If you do build something open-source or otherwise publicly-visible, let me know so I can add it to the [Related Projects](#related-projects) section!
+* Raise issues to improve the project (note: doc typos and clarifications are issues too!)
+ - Please search existing issues before opening a new one - it may have already been addressed.
+* Pull requests: please discuss new code in an issue first, unless the fix is really trivial.
+ - Make sure new code is tested.
+ - Be mindful of existing code - PRs that break existing code have a high probability of being declined, unless it fixes a serious issue.
+* Sponsor the developer
+ - See the Github Sponsor button at the top of the repo on github
+ - or via BuyMeACoffee.com, below
+
+<a href="https://www.buymeacoffee.com/mna" target="_blank"><img src="https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png" alt="Buy Me A Coffee" style="height: 41px !important;width: 174px !important;box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;-webkit-box-shadow: 0px 3px 2px 0px rgba(190, 190, 190, 0.5) !important;" ></a>
+
+## License
+
+The [BSD 3-Clause license][bsd], the same as the [Go language][golic]. Cascadia's license is [here][caslic].
+
+[jquery]: http://jquery.com/
+[go]: http://golang.org/
+[cascadia]: https://github.com/andybalholm/cascadia
+[cascadiacli]: https://github.com/suntong/cascadia
+[bsd]: http://opensource.org/licenses/BSD-3-Clause
+[golic]: http://golang.org/LICENSE
+[caslic]: https://github.com/andybalholm/cascadia/blob/master/LICENSE
+[doc]: https://pkg.go.dev/github.com/PuerkitoBio/goquery
+[index]: http://api.jquery.com/index/
+[gonet]: https://github.com/golang/net/
+[html]: https://pkg.go.dev/golang.org/x/net/html
+[wiki]: https://github.com/PuerkitoBio/goquery/wiki/Tips-and-tricks
+[thatguystone]: https://github.com/thatguystone
+[piotr]: https://github.com/piotrkowalczuk
+[goq]: https://github.com/andrewstuart/goq
+[thiemok]: https://github.com/thiemok
+[djw]: https://github.com/davidjwilkins
@@ -0,0 +1,124 @@
+package goquery
+
+import (
+ "golang.org/x/net/html"
+)
+
const (
	// Largest uint and int values for this platform, computed without
	// importing the math package.
	maxUint = ^uint(0)
	maxInt  = int(maxUint >> 1)

	// ToEnd is a special index value that can be used as end index in a call
	// to Slice so that all elements are selected until the end of the Selection.
	// It is equivalent to passing (*Selection).Length().
	ToEnd = maxInt
)

// First reduces the set of matched elements to the first in the set.
// It returns a new Selection object, and an empty Selection object if
// the selection is empty.
func (s *Selection) First() *Selection {
	return s.Eq(0)
}

// Last reduces the set of matched elements to the last in the set.
// It returns a new Selection object, and an empty Selection object if
// the selection is empty.
func (s *Selection) Last() *Selection {
	return s.Eq(-1)
}

// Eq reduces the set of matched elements to the one at the specified index.
// If a negative index is given, it counts backwards starting at the end of the
// set. It returns a new Selection object, and an empty Selection object if the
// index is invalid.
func (s *Selection) Eq(index int) *Selection {
	// A negative index counts from the end of the set.
	if index < 0 {
		index += len(s.Nodes)
	}

	// Still out of range after adjustment (covers the empty selection too).
	if index >= len(s.Nodes) || index < 0 {
		return newEmptySelection(s.document)
	}

	return s.Slice(index, index+1)
}
+
// Slice reduces the set of matched elements to a subset specified by a range
// of indices. The start index is 0-based and indicates the index of the first
// element to select. The end index is 0-based and indicates the index at which
// the elements stop being selected (the end index is not selected).
//
// The indices may be negative, in which case they represent an offset from the
// end of the selection.
//
// The special value ToEnd may be specified as end index, in which case all elements
// until the end are selected. This works both for a positive and negative start
// index.
//
// Note that, as with a Go slice expression, indices that are still out of
// range after the negative-index adjustment will panic.
func (s *Selection) Slice(start, end int) *Selection {
	if start < 0 {
		start += len(s.Nodes)
	}
	if end == ToEnd {
		end = len(s.Nodes)
	} else if end < 0 {
		end += len(s.Nodes)
	}
	return pushStack(s, s.Nodes[start:end])
}
+
// Get retrieves the underlying node at the specified index.
// Get without parameter is not implemented, since the node array is available
// on the Selection object.
// An index still out of range after the negative-index adjustment panics,
// since the Nodes slice is indexed directly.
func (s *Selection) Get(index int) *html.Node {
	if index < 0 {
		index += len(s.Nodes) // Negative index gets from the end
	}
	return s.Nodes[index]
}

// Index returns the position of the first element within the Selection object
// relative to its sibling elements.
func (s *Selection) Index() int {
	if len(s.Nodes) > 0 {
		// The position is the count of preceding siblings of the first node.
		return newSingleSelection(s.Nodes[0], s.document).PrevAll().Length()
	}
	return -1
}
+
// IndexSelector returns the position of the first element within the
// Selection object relative to the elements matched by the selector, or -1 if
// not found.
func (s *Selection) IndexSelector(selector string) int {
	if len(s.Nodes) > 0 {
		// The selector is evaluated against the whole document, not just
		// this selection.
		sel := s.document.Find(selector)
		return indexInSlice(sel.Nodes, s.Nodes[0])
	}
	return -1
}

// IndexMatcher returns the position of the first element within the
// Selection object relative to the elements matched by the matcher, or -1 if
// not found.
func (s *Selection) IndexMatcher(m Matcher) int {
	if len(s.Nodes) > 0 {
		// The matcher is evaluated against the whole document, not just
		// this selection.
		sel := s.document.FindMatcher(m)
		return indexInSlice(sel.Nodes, s.Nodes[0])
	}
	return -1
}
+
// IndexOfNode returns the position of the specified node within the Selection
// object, or -1 if not found.
func (s *Selection) IndexOfNode(node *html.Node) int {
	return indexInSlice(s.Nodes, node)
}
+
+// IndexOfSelection returns the position of the first node in the specified
+// Selection object within this Selection object, or -1 if not found.
+func (s *Selection) IndexOfSelection(sel *Selection) int {
+ if sel != nil && len(sel.Nodes) > 0 {
+ return indexInSlice(s.Nodes, sel.Nodes[0])
+ }
+ return -1
+}
@@ -0,0 +1,123 @@
+// Copyright (c) 2012-2016, Martin Angers & Contributors
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without modification,
+// are permitted provided that the following conditions are met:
+//
+// * Redistributions of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation and/or
+// other materials provided with the distribution.
+// * Neither the name of the author nor the names of its contributors may be used to
+// endorse or promote products derived from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
+// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
+// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+// WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY
+// WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+/*
+Package goquery implements features similar to jQuery, including the chainable
+syntax, to manipulate and query an HTML document.
+
+It brings a syntax and a set of features similar to jQuery to the Go language.
+It is based on Go's net/html package and the CSS Selector library cascadia.
+Since the net/html parser returns nodes, and not a full-featured DOM
+tree, jQuery's stateful manipulation functions (like height(), css(), detach())
+have been left off.
+
+Also, because the net/html parser requires UTF-8 encoding, goquery requires
+UTF-8 as well: it is the caller's responsibility to ensure that the source
+document provides UTF-8 encoded HTML.
+See the repository's wiki for various options on how to do this.
+
+Syntax-wise, it is as close as possible to jQuery, with the same method names when
+possible, and that warm and fuzzy chainable interface. Since jQuery is the
+ultra-popular library that it is, it was better for a similar HTML-manipulating
+library to follow its API than to start anew (in the same spirit as
+Go's fmt package), even though some of its methods are less than intuitive (looking
+at you, index()...).
+
+It is hosted on GitHub, along with additional documentation in the README.md
+file: https://github.com/puerkitobio/goquery
+
+Please note that because of the net/html dependency, goquery requires Go1.1+.
+
+The various methods are split into files based on the category of behavior.
+The three dots (...) indicate that various "overloads" are available.
+
+* array.go : array-like positional manipulation of the selection.
+ - Eq()
+ - First()
+ - Get()
+ - Index...()
+ - Last()
+ - Slice()
+
+* expand.go : methods that expand or augment the selection's set.
+ - Add...()
+ - AndSelf()
+ - Union(), which is an alias for AddSelection()
+
+* filter.go : filtering methods, that reduce the selection's set.
+ - End()
+ - Filter...()
+ - Has...()
+ - Intersection(), which is an alias of FilterSelection()
+ - Not...()
+
+* iteration.go : methods to loop over the selection's nodes.
+ - Each()
+ - EachWithBreak()
+ - Map()
+
+* manipulation.go : methods for modifying the document
+ - After...()
+ - Append...()
+ - Before...()
+ - Clone()
+ - Empty()
+ - Prepend...()
+ - Remove...()
+ - ReplaceWith...()
+ - Unwrap()
+ - Wrap...()
+ - WrapAll...()
+ - WrapInner...()
+
+* property.go : methods that inspect and get the node's properties values.
+ - Attr*(), RemoveAttr(), SetAttr()
+ - AddClass(), HasClass(), RemoveClass(), ToggleClass()
+ - Html()
+ - Length()
+ - Size(), which is an alias for Length()
+ - Text()
+
+* query.go : methods that query, or reflect, a node's identity.
+ - Contains()
+ - Is...()
+
+* traversal.go : methods to traverse the HTML document tree.
+ - Children...()
+ - Contents()
+ - Find...()
+ - Next...()
+ - Parent[s]...()
+ - Prev...()
+ - Siblings...()
+
+* type.go : definition of the types exposed by goquery.
+ - Document
+ - Selection
+ - Matcher
+
+* utilities.go : definition of helper functions (and not methods on a *Selection)
+that are not part of jQuery, but are useful to goquery.
+ - NodeName
+ - OuterHtml
+*/
+package goquery
@@ -0,0 +1,70 @@
+package goquery
+
+import "golang.org/x/net/html"
+
// Add adds the selector string's matching nodes to those in the current
// selection and returns a new Selection object.
// The selector string is run in the context of the document of the current
// Selection object.
func (s *Selection) Add(selector string) *Selection {
	return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, compileMatcher(selector))...)
}

// AddMatcher adds the matcher's matching nodes to those in the current
// selection and returns a new Selection object.
// The matcher is run in the context of the document of the current
// Selection object.
func (s *Selection) AddMatcher(m Matcher) *Selection {
	return s.AddNodes(findWithMatcher([]*html.Node{s.document.rootNode}, m)...)
}

// AddSelection adds the specified Selection object's nodes to those in the
// current selection and returns a new Selection object.
// A nil Selection is treated as an empty one.
func (s *Selection) AddSelection(sel *Selection) *Selection {
	if sel == nil {
		return s.AddNodes()
	}
	return s.AddNodes(sel.Nodes...)
}

// Union is an alias for AddSelection.
func (s *Selection) Union(sel *Selection) *Selection {
	return s.AddSelection(sel)
}
+
// AddNodes adds the specified nodes to those in the
// current selection and returns a new Selection object.
func (s *Selection) AddNodes(nodes ...*html.Node) *Selection {
	return pushStack(s, appendWithoutDuplicates(s.Nodes, nodes, nil))
}

// AndSelf adds the previous set of elements on the stack to the current set.
// It returns a new Selection object containing the current Selection combined
// with the previous one.
// Deprecated: This function has been deprecated and is now an alias for AddBack().
func (s *Selection) AndSelf() *Selection {
	return s.AddBack()
}

// AddBack adds the previous set of elements on the stack to the current set.
// It returns a new Selection object containing the current Selection combined
// with the previous one.
func (s *Selection) AddBack() *Selection {
	// AddSelection tolerates a nil prevSel.
	return s.AddSelection(s.prevSel)
}
+
// AddBackFiltered reduces the previous set of elements on the stack to those that
// match the selector string, and adds them to the current set.
// It returns a new Selection object containing the current Selection combined
// with the filtered previous one.
// NOTE(review): unlike AddBack, this dereferences s.prevSel without a nil
// check — presumably it is only called after a previous operation; confirm.
func (s *Selection) AddBackFiltered(selector string) *Selection {
	return s.AddSelection(s.prevSel.Filter(selector))
}

// AddBackMatcher reduces the previous set of elements on the stack to those that match
// the matcher, and adds them to the current set.
// It returns a new Selection object containing the current Selection combined
// with the filtered previous one.
func (s *Selection) AddBackMatcher(m Matcher) *Selection {
	return s.AddSelection(s.prevSel.FilterMatcher(m))
}
@@ -0,0 +1,163 @@
+package goquery
+
+import "golang.org/x/net/html"
+
// Filter reduces the set of matched elements to those that match the selector string.
// It returns a new Selection object for this subset of matching elements.
func (s *Selection) Filter(selector string) *Selection {
	return s.FilterMatcher(compileMatcher(selector))
}

// FilterMatcher reduces the set of matched elements to those that match
// the given matcher. It returns a new Selection object for this subset
// of matching elements.
func (s *Selection) FilterMatcher(m Matcher) *Selection {
	return pushStack(s, winnow(s, m, true))
}

// Not removes elements from the Selection that match the selector string.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) Not(selector string) *Selection {
	return s.NotMatcher(compileMatcher(selector))
}

// NotMatcher removes elements from the Selection that match the given matcher.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotMatcher(m Matcher) *Selection {
	return pushStack(s, winnow(s, m, false))
}

// FilterFunction reduces the set of matched elements to those that pass the function's test.
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterFunction(f func(int, *Selection) bool) *Selection {
	return pushStack(s, winnowFunction(s, f, true))
}

// NotFunction removes elements from the Selection that pass the function's test.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotFunction(f func(int, *Selection) bool) *Selection {
	return pushStack(s, winnowFunction(s, f, false))
}
+
// FilterNodes reduces the set of matched elements to those that match the specified nodes.
// It returns a new Selection object for this subset of elements.
func (s *Selection) FilterNodes(nodes ...*html.Node) *Selection {
	return pushStack(s, winnowNodes(s, nodes, true))
}

// NotNodes removes elements from the Selection that match the specified nodes.
// It returns a new Selection object with the matching elements removed.
func (s *Selection) NotNodes(nodes ...*html.Node) *Selection {
	return pushStack(s, winnowNodes(s, nodes, false))
}

// FilterSelection reduces the set of matched elements to those that match a
// node in the specified Selection object.
// It returns a new Selection object for this subset of elements.
// A nil Selection filters against no nodes (yielding an empty result).
func (s *Selection) FilterSelection(sel *Selection) *Selection {
	if sel == nil {
		return pushStack(s, winnowNodes(s, nil, true))
	}
	return pushStack(s, winnowNodes(s, sel.Nodes, true))
}

// NotSelection removes elements from the Selection that match a node in the specified
// Selection object. It returns a new Selection object with the matching elements removed.
// A nil Selection removes nothing.
func (s *Selection) NotSelection(sel *Selection) *Selection {
	if sel == nil {
		return pushStack(s, winnowNodes(s, nil, false))
	}
	return pushStack(s, winnowNodes(s, sel.Nodes, false))
}

// Intersection is an alias for FilterSelection.
func (s *Selection) Intersection(sel *Selection) *Selection {
	return s.FilterSelection(sel)
}
+
// Has reduces the set of matched elements to those that have a descendant
// that matches the selector.
// It returns a new Selection object with the matching elements.
func (s *Selection) Has(selector string) *Selection {
	// The selector is matched against the whole document.
	return s.HasSelection(s.document.Find(selector))
}

// HasMatcher reduces the set of matched elements to those that have a descendant
// that matches the matcher.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasMatcher(m Matcher) *Selection {
	return s.HasSelection(s.document.FindMatcher(m))
}

// HasNodes reduces the set of matched elements to those that have a
// descendant that matches one of the nodes.
// It returns a new Selection object with the matching elements.
func (s *Selection) HasNodes(nodes ...*html.Node) *Selection {
	return s.FilterFunction(func(_ int, sel *Selection) bool {
		// Keep all elements that contain one of the specified nodes.
		for _, n := range nodes {
			if sel.Contains(n) {
				return true
			}
		}
		return false
	})
}

// HasSelection reduces the set of matched elements to those that have a
// descendant that matches one of the nodes of the specified Selection object.
// It returns a new Selection object with the matching elements.
// A nil Selection behaves like an empty one.
func (s *Selection) HasSelection(sel *Selection) *Selection {
	if sel == nil {
		return s.HasNodes()
	}
	return s.HasNodes(sel.Nodes...)
}
+
+// End ends the most recent filtering operation in the current chain and
+// returns the set of matched elements to its previous state.
+func (s *Selection) End() *Selection {
+ if s.prevSel != nil {
+ return s.prevSel
+ }
+ return newEmptySelection(s.document)
+}
+
// winnow filters based on the matcher, and the indicator to keep (Filter) or
// to get rid of (Not) the matching elements.
func winnow(sel *Selection, m Matcher, keep bool) []*html.Node {
	// Optimize if keep is requested: the matcher can filter the slice directly.
	if keep {
		return m.Filter(sel.Nodes)
	}
	// Otherwise keep only the nodes the matcher does NOT match.
	return grep(sel, func(i int, s *Selection) bool {
		return !m.Match(s.Get(0))
	})
}

// winnowNodes filters based on an array of nodes, and the indicator to keep
// (Filter) or to get rid of (Not) the matching elements.
func winnowNodes(sel *Selection, nodes []*html.Node, keep bool) []*html.Node {
	// For small inputs, a linear scan per element is cheaper than building
	// a set (minNodesForSet is defined elsewhere in the package).
	if len(nodes)+len(sel.Nodes) < minNodesForSet {
		return grep(sel, func(i int, s *Selection) bool {
			return isInSlice(nodes, s.Get(0)) == keep
		})
	}

	// Larger inputs: build a set for O(1) membership tests.
	set := make(map[*html.Node]bool)
	for _, n := range nodes {
		set[n] = true
	}
	return grep(sel, func(i int, s *Selection) bool {
		return set[s.Get(0)] == keep
	})
}

// winnowFunction filters based on a function test, and the indicator to keep
// (Filter) or to get rid of (Not) the matching elements.
func winnowFunction(sel *Selection, f func(int, *Selection) bool, keep bool) []*html.Node {
	return grep(sel, func(i int, s *Selection) bool {
		return f(i, s) == keep
	})
}
@@ -0,0 +1,47 @@
+package goquery
+
// Each iterates over a Selection object, executing a function for each
// matched element. It returns the current Selection object. The function
// f is called for each element in the selection with the index of the
// element in that selection starting at 0, and a *Selection that contains
// only that element.
func (s *Selection) Each(f func(int, *Selection)) *Selection {
	for i, n := range s.Nodes {
		f(i, newSingleSelection(n, s.document))
	}
	return s
}
+
+// EachWithBreak iterates over a Selection object, executing a function for each
+// matched element. It is identical to Each except that it is possible to break
+// out of the loop by returning false in the callback function. It returns the
+// current Selection object.
+func (s *Selection) EachWithBreak(f func(int, *Selection) bool) *Selection {
+ for i, n := range s.Nodes {
+ if !f(i, newSingleSelection(n, s.document)) {
+ return s
+ }
+ }
+ return s
+}
+
+// Map passes each element in the current matched set through a function,
+// producing a slice of string holding the returned values. The function
+// f is called for each element in the selection with the index of the
+// element in that selection starting at 0, and a *Selection that contains
+// only that element.
+func (s *Selection) Map(f func(int, *Selection) string) (result []string) {
+ return Map(s, f)
+}
+
// Map is the generic version of Selection.Map, allowing any type to be
// returned. The result has exactly one entry per node in the selection,
// in the same order.
func Map[E any](s *Selection, f func(int, *Selection) E) (result []E) {
	result = make([]E, len(s.Nodes))

	for i, n := range s.Nodes {
		result[i] = f(i, newSingleSelection(n, s.document))
	}

	return result
}
@@ -0,0 +1,679 @@
+package goquery
+
+import (
+ "strings"
+
+ "golang.org/x/net/html"
+)
+
// After applies the selector from the root document and inserts the matched elements
// after the elements in the set of matched elements.
//
// If one of the matched elements in the selection is not currently in the
// document, it's impossible to insert nodes after it, so it will be ignored.
//
// This follows the same rules as Selection.Append.
func (s *Selection) After(selector string) *Selection {
	return s.AfterMatcher(compileMatcher(selector))
}

// AfterMatcher applies the matcher from the root document and inserts the matched elements
// after the elements in the set of matched elements.
//
// If one of the matched elements in the selection is not currently in the
// document, it's impossible to insert nodes after it, so it will be ignored.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterMatcher(m Matcher) *Selection {
	return s.AfterNodes(m.MatchAll(s.document.rootNode)...)
}

// AfterSelection inserts the elements in the selection after each element in the set of matched
// elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterSelection(sel *Selection) *Selection {
	return s.AfterNodes(sel.Nodes...)
}
+
// AfterHtml parses the html and inserts it after the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterHtml(htmlStr string) *Selection {
	return s.eachNodeHtml(htmlStr, true, func(node *html.Node, nodes []*html.Node) {
		// Capture the sibling once so that successive inserts all land
		// between node and its original next sibling, preserving order.
		nextSibling := node.NextSibling
		for _, n := range nodes {
			// Detached nodes have no parent; inserting after them is impossible.
			if node.Parent != nil {
				node.Parent.InsertBefore(n, nextSibling)
			}
		}
	})
}
+
// AfterNodes inserts the nodes after each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AfterNodes(ns ...*html.Node) *Selection {
	return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
		// Detached nodes have no parent; inserting after them is impossible.
		if sn.Parent != nil {
			sn.Parent.InsertBefore(n, sn.NextSibling)
		}
	})
}
+
// Append appends the elements specified by the selector to the end of each element
// in the set of matched elements, following those rules:
//
// 1) The selector is applied to the root document.
//
// 2) Elements that are part of the document will be moved to the new location.
//
// 3) If there are multiple locations to append to, cloned nodes will be
// appended to all target locations except the last one, which will be moved
// as noted in (2).
func (s *Selection) Append(selector string) *Selection {
	return s.AppendMatcher(compileMatcher(selector))
}

// AppendMatcher appends the elements specified by the matcher to the end of each element
// in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendMatcher(m Matcher) *Selection {
	return s.AppendNodes(m.MatchAll(s.document.rootNode)...)
}

// AppendSelection appends the elements in the selection to the end of each element
// in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendSelection(sel *Selection) *Selection {
	return s.AppendNodes(sel.Nodes...)
}
+
// AppendHtml parses the html and appends it to the set of matched elements.
func (s *Selection) AppendHtml(htmlStr string) *Selection {
	return s.eachNodeHtml(htmlStr, false, func(node *html.Node, nodes []*html.Node) {
		for _, n := range nodes {
			node.AppendChild(n)
		}
	})
}
+
// AppendNodes appends the specified nodes to each node in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) AppendNodes(ns ...*html.Node) *Selection {
	return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
		sn.AppendChild(n)
	})
}
+
// Before inserts the matched elements before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) Before(selector string) *Selection {
	return s.BeforeMatcher(compileMatcher(selector))
}

// BeforeMatcher inserts the matched elements before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeMatcher(m Matcher) *Selection {
	return s.BeforeNodes(m.MatchAll(s.document.rootNode)...)
}

// BeforeSelection inserts the elements in the selection before each element in the set of matched
// elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeSelection(sel *Selection) *Selection {
	return s.BeforeNodes(sel.Nodes...)
}
+
// BeforeHtml parses the html and inserts it before the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeHtml(htmlStr string) *Selection {
	return s.eachNodeHtml(htmlStr, true, func(node *html.Node, nodes []*html.Node) {
		for _, n := range nodes {
			// Detached nodes have no parent; inserting before them is impossible.
			if node.Parent != nil {
				node.Parent.InsertBefore(n, node)
			}
		}
	})
}
+
// BeforeNodes inserts the nodes before each element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) BeforeNodes(ns ...*html.Node) *Selection {
	return s.manipulateNodes(ns, false, func(sn *html.Node, n *html.Node) {
		// Detached nodes have no parent; inserting before them is impossible.
		if sn.Parent != nil {
			sn.Parent.InsertBefore(n, sn)
		}
	})
}
+
// Clone creates a deep copy of the set of matched nodes. The new nodes will not be
// attached to the document.
func (s *Selection) Clone() *Selection {
	ns := newEmptySelection(s.document)
	ns.Nodes = cloneNodes(s.Nodes)
	return ns
}

// Empty removes all children nodes from the set of matched elements.
// It returns the children nodes in a new Selection.
func (s *Selection) Empty() *Selection {
	var nodes []*html.Node

	for _, n := range s.Nodes {
		// Re-read FirstChild each iteration: RemoveChild detaches the
		// current first child, promoting the next one.
		for c := n.FirstChild; c != nil; c = n.FirstChild {
			n.RemoveChild(c)
			nodes = append(nodes, c)
		}
	}

	return pushStack(s, nodes)
}
+
// Prepend prepends the elements specified by the selector to each element in
// the set of matched elements, following the same rules as Append.
func (s *Selection) Prepend(selector string) *Selection {
	return s.PrependMatcher(compileMatcher(selector))
}

// PrependMatcher prepends the elements specified by the matcher to each
// element in the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependMatcher(m Matcher) *Selection {
	return s.PrependNodes(m.MatchAll(s.document.rootNode)...)
}

// PrependSelection prepends the elements in the selection to each element in
// the set of matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependSelection(sel *Selection) *Selection {
	return s.PrependNodes(sel.Nodes...)
}
+
// PrependHtml parses the html and prepends it to the set of matched elements.
func (s *Selection) PrependHtml(htmlStr string) *Selection {
	return s.eachNodeHtml(htmlStr, false, func(node *html.Node, nodes []*html.Node) {
		// Capture the original first child once so the parsed nodes end up
		// before it, in their given order.
		firstChild := node.FirstChild
		for _, n := range nodes {
			node.InsertBefore(n, firstChild)
		}
	})
}
+
// PrependNodes prepends the specified nodes to each node in the set of
// matched elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) PrependNodes(ns ...*html.Node) *Selection {
	return s.manipulateNodes(ns, true, func(sn *html.Node, n *html.Node) {
		// sn.FirstChild may be nil, in which case this functions like
		// sn.AppendChild()
		sn.InsertBefore(n, sn.FirstChild)
	})
}
+
// Remove removes the set of matched elements from the document.
// It returns the same selection, now consisting of nodes not in the document.
func (s *Selection) Remove() *Selection {
	for _, n := range s.Nodes {
		// Nodes already detached from the document are left untouched.
		if n.Parent != nil {
			n.Parent.RemoveChild(n)
		}
	}

	return s
}

// RemoveFiltered removes from the current set of matched elements those that
// match the selector filter. It returns the Selection of removed nodes.
//
// For example if the selection s contains "<h1>", "<h2>" and "<h3>"
// and s.RemoveFiltered("h2") is called, only the "<h2>" node is removed
// (and returned), while "<h1>" and "<h3>" are kept in the document.
func (s *Selection) RemoveFiltered(selector string) *Selection {
	return s.RemoveMatcher(compileMatcher(selector))
}

// RemoveMatcher removes from the current set of matched elements those that
// match the Matcher filter. It returns the Selection of removed nodes.
// See RemoveFiltered for additional information.
func (s *Selection) RemoveMatcher(m Matcher) *Selection {
	return s.FilterMatcher(m).Remove()
}
+
// ReplaceWith replaces each element in the set of matched elements with the
// nodes matched by the given selector.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWith(selector string) *Selection {
	return s.ReplaceWithMatcher(compileMatcher(selector))
}

// ReplaceWithMatcher replaces each element in the set of matched elements with
// the nodes matched by the given Matcher.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithMatcher(m Matcher) *Selection {
	return s.ReplaceWithNodes(m.MatchAll(s.document.rootNode)...)
}

// ReplaceWithSelection replaces each element in the set of matched elements with
// the nodes from the given Selection.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithSelection(sel *Selection) *Selection {
	return s.ReplaceWithNodes(sel.Nodes...)
}
+
// ReplaceWithHtml replaces each element in the set of matched elements with
// the parsed HTML.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithHtml(htmlStr string) *Selection {
	// Insert the parsed nodes after each matched element, then remove the
	// matched elements themselves.
	s.eachNodeHtml(htmlStr, true, func(node *html.Node, nodes []*html.Node) {
		nextSibling := node.NextSibling
		for _, n := range nodes {
			if node.Parent != nil {
				node.Parent.InsertBefore(n, nextSibling)
			}
		}
	})
	return s.Remove()
}
+
// ReplaceWithNodes replaces each element in the set of matched elements with
// the given nodes.
// It returns the removed elements.
//
// This follows the same rules as Selection.Append.
func (s *Selection) ReplaceWithNodes(ns ...*html.Node) *Selection {
	// Insert the nodes after each matched element, then remove the matched
	// elements themselves.
	s.AfterNodes(ns...)
	return s.Remove()
}
+
// SetHtml sets the html content of each element in the selection to
// specified html string.
func (s *Selection) SetHtml(htmlStr string) *Selection {
	// First remove all existing children of each matched element.
	for _, context := range s.Nodes {
		// Re-read FirstChild each iteration: RemoveChild promotes the next child.
		for c := context.FirstChild; c != nil; c = context.FirstChild {
			context.RemoveChild(c)
		}
	}
	// Then append the parsed nodes as the new content.
	return s.eachNodeHtml(htmlStr, false, func(node *html.Node, nodes []*html.Node) {
		for _, n := range nodes {
			node.AppendChild(n)
		}
	})
}

// SetText sets the content of each element in the selection to specified content.
// The provided text string is escaped.
func (s *Selection) SetText(text string) *Selection {
	return s.SetHtml(html.EscapeString(text))
}
+
// Unwrap removes the parents of the set of matched elements, leaving the matched
// elements (and their siblings, if any) in their place.
// It returns the original selection.
func (s *Selection) Unwrap() *Selection {
	s.Parent().Each(func(i int, ss *Selection) {
		// For some reason, jquery allows unwrap to remove the <head> element, so
		// allowing it here too. Same for <html>. Why it allows those elements to
		// be unwrapped while not allowing body is a mystery to me.
		if ss.Nodes[0].Data != "body" {
			// Replace the parent with its own contents, dropping the parent.
			ss.ReplaceWithSelection(ss.Contents())
		}
	})

	return s
}
+
// Wrap wraps each element in the set of matched elements inside the first
// element matched by the given selector. The matched child is cloned before
// being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) Wrap(selector string) *Selection {
	return s.WrapMatcher(compileMatcher(selector))
}

// WrapMatcher wraps each element in the set of matched elements inside the
// first element matched by the given matcher. The matched child is cloned
// before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapMatcher(m Matcher) *Selection {
	return s.wrapNodes(m.MatchAll(s.document.rootNode)...)
}

// WrapSelection wraps each element in the set of matched elements inside the
// first element in the given Selection. The element is cloned before being
// inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapSelection(sel *Selection) *Selection {
	return s.wrapNodes(sel.Nodes...)
}
+
// WrapHtml wraps each element in the set of matched elements inside the inner-
// most child of the given HTML.
//
// It returns the original set of elements.
func (s *Selection) WrapHtml(htmlStr string) *Selection {
	// Cache parsed wrapper nodes per parent element name: the fragment
	// parser's output depends on the parsing context element.
	nodesMap := make(map[string][]*html.Node)
	for _, context := range s.Nodes {
		var parent *html.Node
		if context.Parent != nil {
			parent = context.Parent
		} else {
			// Detached node: parse against a synthetic, nameless element.
			parent = &html.Node{Type: html.ElementNode}
		}
		nodes, found := nodesMap[nodeName(parent)]
		if !found {
			nodes = parseHtmlWithContext(htmlStr, parent)
			nodesMap[nodeName(parent)] = nodes
		}
		// Clone so each element receives its own copy of the wrapper.
		newSingleSelection(context, s.document).wrapAllNodes(cloneNodes(nodes)...)
	}
	return s
}
+
+// WrapNode wraps each element in the set of matched elements inside the inner-
+// most child of the given node. The given node is copied before being inserted
+// into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapNode(n *html.Node) *Selection {
+ return s.wrapNodes(n)
+}
+
+func (s *Selection) wrapNodes(ns ...*html.Node) *Selection {
+ s.Each(func(i int, ss *Selection) {
+ ss.wrapAllNodes(ns...)
+ })
+
+ return s
+}
+
+// WrapAll wraps a single HTML structure, matched by the given selector, around
+// all elements in the set of matched elements. The matched child is cloned
+// before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapAll(selector string) *Selection {
+ return s.WrapAllMatcher(compileMatcher(selector))
+}
+
+// WrapAllMatcher wraps a single HTML structure, matched by the given Matcher,
+// around all elements in the set of matched elements. The matched child is
+// cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapAllMatcher(m Matcher) *Selection {
+ return s.wrapAllNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// WrapAllSelection wraps a single HTML structure, the first node of the given
+// Selection, around all elements in the set of matched elements. The matched
+// child is cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapAllSelection(sel *Selection) *Selection {
+ return s.wrapAllNodes(sel.Nodes...)
+}
+
// WrapAllHtml wraps the given HTML structure around all elements in the set of
// matched elements. The matched child is cloned before being inserted into the
// document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllHtml(htmlStr string) *Selection {
	var context *html.Node
	var nodes []*html.Node
	if len(s.Nodes) > 0 {
		context = s.Nodes[0]
		// Parse in the context of the first element's position: attached
		// nodes use context-aware parsing, detached ones use a generic
		// context.
		if context.Parent != nil {
			nodes = parseHtmlWithContext(htmlStr, context)
		} else {
			nodes = parseHtml(htmlStr)
		}
	}
	// With an empty selection, nodes stays nil and wrapAllNodes is a no-op.
	return s.wrapAllNodes(nodes...)
}
+
+func (s *Selection) wrapAllNodes(ns ...*html.Node) *Selection {
+ if len(ns) > 0 {
+ return s.WrapAllNode(ns[0])
+ }
+ return s
+}
+
// WrapAllNode wraps the given node around the first element in the Selection,
// making all other nodes in the Selection children of the given node. The node
// is cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapAllNode(n *html.Node) *Selection {
	if s.Size() == 0 {
		return s
	}

	wrap := cloneNode(n)

	// Place the wrapper where the first matched element currently sits, and
	// detach that element so it can be re-appended inside the wrapper.
	first := s.Nodes[0]
	if first.Parent != nil {
		first.Parent.InsertBefore(wrap, first)
		first.Parent.RemoveChild(first)
	}

	// Descend to the innermost element of the wrapper structure; that is
	// where the matched elements get appended.
	for c := getFirstChildEl(wrap); c != nil; c = getFirstChildEl(wrap) {
		wrap = c
	}

	newSingleSelection(wrap, s.document).AppendSelection(s)

	return s
}
+
+// WrapInner wraps an HTML structure, matched by the given selector, around the
+// content of element in the set of matched elements. The matched child is
+// cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapInner(selector string) *Selection {
+ return s.WrapInnerMatcher(compileMatcher(selector))
+}
+
+// WrapInnerMatcher wraps an HTML structure, matched by the given selector,
+// around the content of element in the set of matched elements. The matched
+// child is cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapInnerMatcher(m Matcher) *Selection {
+ return s.wrapInnerNodes(m.MatchAll(s.document.rootNode)...)
+}
+
+// WrapInnerSelection wraps an HTML structure, matched by the given selector,
+// around the content of element in the set of matched elements. The matched
+// child is cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapInnerSelection(sel *Selection) *Selection {
+ return s.wrapInnerNodes(sel.Nodes...)
+}
+
// WrapInnerHtml wraps an HTML structure, matched by the given selector, around
// the content of element in the set of matched elements. The matched child is
// cloned before being inserted into the document.
//
// It returns the original set of elements.
func (s *Selection) WrapInnerHtml(htmlStr string) *Selection {
	// Cache parsed wrapper nodes per element name: the fragment parser's
	// output depends on the context element.
	nodesMap := make(map[string][]*html.Node)
	for _, context := range s.Nodes {
		nodes, found := nodesMap[nodeName(context)]
		if !found {
			nodes = parseHtmlWithContext(htmlStr, context)
			nodesMap[nodeName(context)] = nodes
		}
		// Clone so each element gets its own copy of the wrapper nodes.
		newSingleSelection(context, s.document).wrapInnerNodes(cloneNodes(nodes)...)
	}
	return s
}
+
+// WrapInnerNode wraps an HTML structure, matched by the given selector, around
+// the content of element in the set of matched elements. The matched child is
+// cloned before being inserted into the document.
+//
+// It returns the original set of elements.
+func (s *Selection) WrapInnerNode(n *html.Node) *Selection {
+ return s.wrapInnerNodes(n)
+}
+
// wrapInnerNodes wraps the contents of each matched element with the first
// node of ns, inserting a clone when an element has no content at all.
func (s *Selection) wrapInnerNodes(ns ...*html.Node) *Selection {
	if len(ns) == 0 {
		return s
	}

	s.Each(func(i int, s *Selection) {
		contents := s.Contents()

		if contents.Size() > 0 {
			// Wrap the existing children inside the wrapper structure.
			contents.wrapAllNodes(ns...)
		} else {
			// Empty element: simply append a clone of the wrapper.
			s.AppendNodes(cloneNode(ns[0]))
		}
	})

	return s
}
+
// parseHtml parses the given fragment using a generic, detached element node
// as the parsing context and returns the resulting nodes.
func parseHtml(h string) []*html.Node {
	// Errors are only returned when the io.Reader returns any error besides
	// EOF, but strings.Reader never will
	nodes, err := html.ParseFragment(strings.NewReader(h), &html.Node{Type: html.ElementNode})
	if err != nil {
		panic("goquery: failed to parse HTML: " + err.Error())
	}
	return nodes
}
+
// parseHtmlWithContext parses the given fragment as if it appeared inside the
// given context element and returns the resulting nodes.
func parseHtmlWithContext(h string, context *html.Node) []*html.Node {
	// Errors are only returned when the io.Reader returns any error besides
	// EOF, but strings.Reader never will
	nodes, err := html.ParseFragment(strings.NewReader(h), context)
	if err != nil {
		panic("goquery: failed to parse HTML: " + err.Error())
	}
	return nodes
}
+
+// Get the first child that is an ElementNode
+func getFirstChildEl(n *html.Node) *html.Node {
+ c := n.FirstChild
+ for c != nil && c.Type != html.ElementNode {
+ c = c.NextSibling
+ }
+ return c
+}
+
+// Deep copy a slice of nodes.
+func cloneNodes(ns []*html.Node) []*html.Node {
+ cns := make([]*html.Node, 0, len(ns))
+
+ for _, n := range ns {
+ cns = append(cns, cloneNode(n))
+ }
+
+ return cns
+}
+
// Deep copy a node. The new node has clones of all the original node's
// children but none of its parents or siblings.
func cloneNode(n *html.Node) *html.Node {
	nn := &html.Node{
		Type: n.Type,
		DataAtom: n.DataAtom,
		Data: n.Data,
		Attr: make([]html.Attribute, len(n.Attr)),
	}

	// Attributes are copied by value so later edits don't alias the source.
	copy(nn.Attr, n.Attr)
	// Recursively clone the children and attach them in document order.
	for c := n.FirstChild; c != nil; c = c.NextSibling {
		nn.AppendChild(cloneNode(c))
	}

	return nn
}
+
// manipulateNodes applies the callback f to every (selection node, source
// node) pair. All targets except the last receive clones of the source nodes;
// the last target receives the originals (detached from any parent first), so
// the source nodes are moved rather than duplicated.
func (s *Selection) manipulateNodes(ns []*html.Node, reverse bool,
	f func(sn *html.Node, n *html.Node)) *Selection {

	lasti := s.Size() - 1

	// net.Html doesn't provide document fragments for insertion, so to get
	// things in the correct order with After() and Prepend(), the callback
	// needs to be called on the reverse of the nodes.
	if reverse {
		for i, j := 0, len(ns)-1; i < j; i, j = i+1, j-1 {
			ns[i], ns[j] = ns[j], ns[i]
		}
	}

	for i, sn := range s.Nodes {
		for _, n := range ns {
			if i != lasti {
				f(sn, cloneNode(n))
			} else {
				// Last target: move the original node, removing it from its
				// current parent before insertion.
				if n.Parent != nil {
					n.Parent.RemoveChild(n)
				}
				f(sn, n)
			}
		}
	}

	return s
}
+
// eachNodeHtml parses the given html string and inserts the resulting nodes in the dom with the mergeFn.
// The parsed nodes are inserted for each element of the selection.
// isParent can be used to indicate that the elements of the selection should be treated as the parent for the parsed html.
// A cache is used to avoid parsing the html multiple times should the elements of the selection result in the same context.
func (s *Selection) eachNodeHtml(htmlStr string, isParent bool, mergeFn func(n *html.Node, nodes []*html.Node)) *Selection {
	// cache to avoid parsing the html for the same context multiple times
	nodeCache := make(map[string][]*html.Node)
	var context *html.Node
	for _, n := range s.Nodes {
		if isParent {
			context = n.Parent
		} else {
			// Only element nodes can serve as a parsing context; skip text,
			// comment and other non-element nodes.
			if n.Type != html.ElementNode {
				continue
			}
			context = n
		}
		if context != nil {
			// The fragment parser's output depends on the context element's
			// name, hence the cache is keyed by nodeName(context).
			nodes, found := nodeCache[nodeName(context)]
			if !found {
				nodes = parseHtmlWithContext(htmlStr, context)
				nodeCache[nodeName(context)] = nodes
			}
			// Clone so each target receives its own copy of the parsed nodes.
			mergeFn(n, cloneNodes(nodes))
		}
	}
	return s
}
@@ -0,0 +1,275 @@
+package goquery
+
+import (
+ "bytes"
+ "regexp"
+ "strings"
+
+ "golang.org/x/net/html"
+)
+
// rxClassTrim matches the whitespace characters (tab, CR, LF) that get
// normalized to single spaces when manipulating the "class" attribute.
var rxClassTrim = regexp.MustCompile("[\t\r\n]")
+
+// Attr gets the specified attribute's value for the first element in the
+// Selection. To get the value for each element individually, use a looping
+// construct such as Each or Map method.
+func (s *Selection) Attr(attrName string) (val string, exists bool) {
+ if len(s.Nodes) == 0 {
+ return
+ }
+ return getAttributeValue(attrName, s.Nodes[0])
+}
+
+// AttrOr works like Attr but returns default value if attribute is not present.
+func (s *Selection) AttrOr(attrName, defaultValue string) string {
+ if len(s.Nodes) == 0 {
+ return defaultValue
+ }
+
+ val, exists := getAttributeValue(attrName, s.Nodes[0])
+ if !exists {
+ return defaultValue
+ }
+
+ return val
+}
+
+// RemoveAttr removes the named attribute from each element in the set of matched elements.
+func (s *Selection) RemoveAttr(attrName string) *Selection {
+ for _, n := range s.Nodes {
+ removeAttr(n, attrName)
+ }
+
+ return s
+}
+
+// SetAttr sets the given attribute on each element in the set of matched elements.
+func (s *Selection) SetAttr(attrName, val string) *Selection {
+ for _, n := range s.Nodes {
+ attr := getAttributePtr(attrName, n)
+ if attr == nil {
+ n.Attr = append(n.Attr, html.Attribute{Key: attrName, Val: val})
+ } else {
+ attr.Val = val
+ }
+ }
+
+ return s
+}
+
+// Text gets the combined text contents of each element in the set of matched
+// elements, including their descendants.
+func (s *Selection) Text() string {
+ var buf bytes.Buffer
+
+ // Slightly optimized vs calling Each: no single selection object created
+ var f func(*html.Node)
+ f = func(n *html.Node) {
+ if n.Type == html.TextNode {
+ // Keep newlines and spaces, like jQuery
+ buf.WriteString(n.Data)
+ }
+ if n.FirstChild != nil {
+ for c := n.FirstChild; c != nil; c = c.NextSibling {
+ f(c)
+ }
+ }
+ }
+ for _, n := range s.Nodes {
+ f(n)
+ }
+
+ return buf.String()
+}
+
+// Size is an alias for Length.
+func (s *Selection) Size() int {
+ return s.Length()
+}
+
// Length returns the number of elements in the Selection object.
// A nil Nodes slice is fine here: len of a nil slice is 0.
func (s *Selection) Length() int {
	return len(s.Nodes)
}
+
+// Html gets the HTML contents of the first element in the set of matched
+// elements. It includes text and comment nodes.
+func (s *Selection) Html() (ret string, e error) {
+ // Since there is no .innerHtml, the HTML content must be re-created from
+ // the nodes using html.Render.
+ var buf bytes.Buffer
+
+ if len(s.Nodes) > 0 {
+ for c := s.Nodes[0].FirstChild; c != nil; c = c.NextSibling {
+ e = html.Render(&buf, c)
+ if e != nil {
+ return
+ }
+ }
+ ret = buf.String()
+ }
+
+ return
+}
+
// AddClass adds the given class(es) to each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
func (s *Selection) AddClass(class ...string) *Selection {
	classStr := strings.TrimSpace(strings.Join(class, " "))

	if classStr == "" {
		return s
	}

	tcls := getClassesSlice(classStr)
	for _, n := range s.Nodes {
		// curClasses is space-padded (" a b "), so substring checks with
		// " name " only match whole class names.
		curClasses, attr := getClassesAndAttr(n, true)
		for _, newClass := range tcls {
			if !strings.Contains(curClasses, " "+newClass+" ") {
				curClasses += newClass + " "
			}
		}

		// setClasses trims the padding before writing the attribute back.
		setClasses(n, attr, curClasses)
	}

	return s
}
+
+// HasClass determines whether any of the matched elements are assigned the
+// given class.
+func (s *Selection) HasClass(class string) bool {
+ class = " " + class + " "
+ for _, n := range s.Nodes {
+ classes, _ := getClassesAndAttr(n, false)
+ if strings.Contains(classes, class) {
+ return true
+ }
+ }
+ return false
+}
+
// RemoveClass removes the given class(es) from each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
// If no class name is provided, all classes are removed.
func (s *Selection) RemoveClass(class ...string) *Selection {
	var rclasses []string

	classStr := strings.TrimSpace(strings.Join(class, " "))
	remove := classStr == ""

	if !remove {
		rclasses = getClassesSlice(classStr)
	}

	for _, n := range s.Nodes {
		if remove {
			// No class specified: drop the attribute entirely.
			removeAttr(n, "class")
		} else {
			// classes is space-padded (" a b "), so replacing " name " with
			// " " removes whole class names only.
			classes, attr := getClassesAndAttr(n, true)
			for _, rcl := range rclasses {
				classes = strings.Replace(classes, " "+rcl+" ", " ", -1)
			}

			setClasses(n, attr, classes)
		}
	}

	return s
}
+
// ToggleClass adds or removes the given class(es) for each element in the set of matched elements.
// Multiple class names can be specified, separated by a space or via multiple arguments.
func (s *Selection) ToggleClass(class ...string) *Selection {
	classStr := strings.TrimSpace(strings.Join(class, " "))

	if classStr == "" {
		return s
	}

	tcls := getClassesSlice(classStr)

	for _, n := range s.Nodes {
		// classes is space-padded (" a b "): present classes are removed by
		// replacing " name " with " ", absent ones are appended.
		classes, attr := getClassesAndAttr(n, true)
		for _, tcl := range tcls {
			if strings.Contains(classes, " "+tcl+" ") {
				classes = strings.Replace(classes, " "+tcl+" ", " ", -1)
			} else {
				classes += tcl + " "
			}
		}

		setClasses(n, attr, classes)
	}

	return s
}
+
+func getAttributePtr(attrName string, n *html.Node) *html.Attribute {
+ if n == nil {
+ return nil
+ }
+
+ for i, a := range n.Attr {
+ if a.Key == attrName {
+ return &n.Attr[i]
+ }
+ }
+ return nil
+}
+
+// Private function to get the specified attribute's value from a node.
+func getAttributeValue(attrName string, n *html.Node) (val string, exists bool) {
+ if a := getAttributePtr(attrName, n); a != nil {
+ val = a.Val
+ exists = true
+ }
+ return
+}
+
// Get and normalize the "class" attribute from the node.
// The returned classes string is padded with a leading and trailing space
// (" a b ") so callers can test membership via " name " substring checks.
// When create is true and the node is an element without a class attribute,
// an empty one is appended and a pointer into n.Attr is returned; note that
// this pointer is invalidated by any subsequent append to n.Attr.
func getClassesAndAttr(n *html.Node, create bool) (classes string, attr *html.Attribute) {
	// Applies only to element nodes
	if n.Type == html.ElementNode {
		attr = getAttributePtr("class", n)
		if attr == nil && create {
			n.Attr = append(n.Attr, html.Attribute{
				Key: "class",
				Val: "",
			})
			attr = &n.Attr[len(n.Attr)-1]
		}
	}

	if attr == nil {
		classes = " "
	} else {
		// Normalize tabs/CR/LF to spaces and add the sentinel padding.
		classes = rxClassTrim.ReplaceAllString(" "+attr.Val+" ", " ")
	}

	return
}
+
// getClassesSlice splits a class string into individual names after
// normalizing whitespace. The padding makes Split produce empty leading and
// trailing entries; callers tolerate these because their membership checks
// use space-padded substrings.
func getClassesSlice(classes string) []string {
	return strings.Split(rxClassTrim.ReplaceAllString(" "+classes+" ", " "), " ")
}
+
// removeAttr deletes the named attribute from n, if present. Removal swaps
// the last attribute into the vacated slot (so attribute order is not
// preserved) and zeroes the freed slot so its strings are not retained.
func removeAttr(n *html.Node, attrName string) {
	for i, a := range n.Attr {
		if a.Key == attrName {
			n.Attr[i], n.Attr[len(n.Attr)-1], n.Attr =
				n.Attr[len(n.Attr)-1], html.Attribute{}, n.Attr[:len(n.Attr)-1]
			return
		}
	}
}
+
+func setClasses(n *html.Node, attr *html.Attribute, classes string) {
+ classes = strings.TrimSpace(classes)
+ if classes == "" {
+ removeAttr(n, "class")
+ return
+ }
+
+ attr.Val = classes
+}
@@ -0,0 +1,49 @@
+package goquery
+
+import "golang.org/x/net/html"
+
+// Is checks the current matched set of elements against a selector and
+// returns true if at least one of these elements matches.
+func (s *Selection) Is(selector string) bool {
+ return s.IsMatcher(compileMatcher(selector))
+}
+
+// IsMatcher checks the current matched set of elements against a matcher and
+// returns true if at least one of these elements matches.
+func (s *Selection) IsMatcher(m Matcher) bool {
+ if len(s.Nodes) > 0 {
+ if len(s.Nodes) == 1 {
+ return m.Match(s.Nodes[0])
+ }
+ return len(m.Filter(s.Nodes)) > 0
+ }
+
+ return false
+}
+
+// IsFunction checks the current matched set of elements against a predicate and
+// returns true if at least one of these elements matches.
+func (s *Selection) IsFunction(f func(int, *Selection) bool) bool {
+ return s.FilterFunction(f).Length() > 0
+}
+
+// IsSelection checks the current matched set of elements against a Selection object
+// and returns true if at least one of these elements matches.
+func (s *Selection) IsSelection(sel *Selection) bool {
+ return s.FilterSelection(sel).Length() > 0
+}
+
+// IsNodes checks the current matched set of elements against the specified nodes
+// and returns true if at least one of these elements matches.
+func (s *Selection) IsNodes(nodes ...*html.Node) bool {
+ return s.FilterNodes(nodes...).Length() > 0
+}
+
// Contains returns true if the specified Node is within,
// at any depth, one of the nodes in the Selection object.
// It is NOT inclusive, to behave like jQuery's implementation, and
// unlike Javascript's .contains, so if the contained
// node is itself in the selection, it returns false.
func (s *Selection) Contains(n *html.Node) bool {
	// Delegates the descendant check to the shared sliceContains helper.
	return sliceContains(s.Nodes, n)
}
@@ -0,0 +1,704 @@
+package goquery
+
+import "golang.org/x/net/html"
+
// siblingType selects which siblings are collected when iterating over
// children at the same level.
type siblingType int

// Sibling type, used internally when iterating over children at the same
// level (siblings) to specify which nodes are requested.
// The iota - 3 offset makes siblingAll the zero value, with the "prev"
// variants negative and the "next" variants positive.
const (
	siblingPrevUntil siblingType = iota - 3
	siblingPrevAll
	siblingPrev
	siblingAll
	siblingNext
	siblingNextAll
	siblingNextUntil
	siblingAllIncludingNonElements
)
+
+// Find gets the descendants of each element in the current set of matched
+// elements, filtered by a selector. It returns a new Selection object
+// containing these matched elements.
+//
+// Note that as for all methods accepting a selector string, the selector is
+// compiled and applied by the cascadia package and inherits its behavior and
+// constraints regarding supported selectors. See the note on cascadia in
+// the goquery documentation here:
+// https://github.com/PuerkitoBio/goquery?tab=readme-ov-file#api
+func (s *Selection) Find(selector string) *Selection {
+ return pushStack(s, findWithMatcher(s.Nodes, compileMatcher(selector)))
+}
+
+// FindMatcher gets the descendants of each element in the current set of matched
+// elements, filtered by the matcher. It returns a new Selection object
+// containing these matched elements.
+func (s *Selection) FindMatcher(m Matcher) *Selection {
+ return pushStack(s, findWithMatcher(s.Nodes, m))
+}
+
+// FindSelection gets the descendants of each element in the current
+// Selection, filtered by a Selection. It returns a new Selection object
+// containing these matched elements.
+func (s *Selection) FindSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return pushStack(s, nil)
+ }
+ return s.FindNodes(sel.Nodes...)
+}
+
// FindNodes gets the descendants of each element in the current
// Selection, filtered by some nodes. It returns a new Selection object
// containing these matched elements.
func (s *Selection) FindNodes(nodes ...*html.Node) *Selection {
	return pushStack(s, mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
		// Keep n only when it lies within the current selection (same
		// sliceContains helper used by Contains — presumably a descendant
		// check, not plain identity; verify against its definition).
		if sliceContains(s.Nodes, n) {
			return []*html.Node{n}
		}
		return nil
	}))
}
+
+// Contents gets the children of each element in the Selection,
+// including text and comment nodes. It returns a new Selection object
+// containing these elements.
+func (s *Selection) Contents() *Selection {
+ return pushStack(s, getChildrenNodes(s.Nodes, siblingAllIncludingNonElements))
+}
+
+// ContentsFiltered gets the children of each element in the Selection,
+// filtered by the specified selector. It returns a new Selection
+// object containing these elements. Since selectors only act on Element nodes,
+// this function is an alias to ChildrenFiltered unless the selector is empty,
+// in which case it is an alias to Contents.
+func (s *Selection) ContentsFiltered(selector string) *Selection {
+ if selector != "" {
+ return s.ChildrenFiltered(selector)
+ }
+ return s.Contents()
+}
+
// ContentsMatcher gets the children of each element in the Selection,
// filtered by the specified matcher. It returns a new Selection
// object containing these elements. Since matchers only act on Element nodes,
// this function is an alias to ChildrenMatcher.
func (s *Selection) ContentsMatcher(m Matcher) *Selection {
	// Pure delegation: matchers cannot select text/comment nodes anyway.
	return s.ChildrenMatcher(m)
}
+
+// Children gets the child elements of each element in the Selection.
+// It returns a new Selection object containing these elements.
+func (s *Selection) Children() *Selection {
+ return pushStack(s, getChildrenNodes(s.Nodes, siblingAll))
+}
+
+// ChildrenFiltered gets the child elements of each element in the Selection,
+// filtered by the specified selector. It returns a new
+// Selection object containing these elements.
+func (s *Selection) ChildrenFiltered(selector string) *Selection {
+ return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), compileMatcher(selector))
+}
+
+// ChildrenMatcher gets the child elements of each element in the Selection,
+// filtered by the specified matcher. It returns a new
+// Selection object containing these elements.
+func (s *Selection) ChildrenMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getChildrenNodes(s.Nodes, siblingAll), m)
+}
+
+// Parent gets the parent of each element in the Selection. It returns a
+// new Selection object containing the matched elements.
+func (s *Selection) Parent() *Selection {
+ return pushStack(s, getParentNodes(s.Nodes))
+}
+
+// ParentFiltered gets the parent of each element in the Selection filtered by a
+// selector. It returns a new Selection object containing the matched elements.
+func (s *Selection) ParentFiltered(selector string) *Selection {
+ return filterAndPush(s, getParentNodes(s.Nodes), compileMatcher(selector))
+}
+
+// ParentMatcher gets the parent of each element in the Selection filtered by a
+// matcher. It returns a new Selection object containing the matched elements.
+func (s *Selection) ParentMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getParentNodes(s.Nodes), m)
+}
+
+// Closest gets the first element that matches the selector by testing the
+// element itself and traversing up through its ancestors in the DOM tree.
+func (s *Selection) Closest(selector string) *Selection {
+ cs := compileMatcher(selector)
+ return s.ClosestMatcher(cs)
+}
+
+// ClosestMatcher gets the first element that matches the matcher by testing the
+// element itself and traversing up through its ancestors in the DOM tree.
+func (s *Selection) ClosestMatcher(m Matcher) *Selection {
+ return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
+ // For each node in the selection, test the node itself, then each parent
+ // until a match is found.
+ for ; n != nil; n = n.Parent {
+ if m.Match(n) {
+ return []*html.Node{n}
+ }
+ }
+ return nil
+ }))
+}
+
+// ClosestNodes gets the first element that matches one of the nodes by testing the
+// element itself and traversing up through its ancestors in the DOM tree.
+func (s *Selection) ClosestNodes(nodes ...*html.Node) *Selection {
+ set := make(map[*html.Node]bool)
+ for _, n := range nodes {
+ set[n] = true
+ }
+ return pushStack(s, mapNodes(s.Nodes, func(i int, n *html.Node) []*html.Node {
+ // For each node in the selection, test the node itself, then each parent
+ // until a match is found.
+ for ; n != nil; n = n.Parent {
+ if set[n] {
+ return []*html.Node{n}
+ }
+ }
+ return nil
+ }))
+}
+
+// ClosestSelection gets the first element that matches one of the nodes in the
+// Selection by testing the element itself and traversing up through its ancestors
+// in the DOM tree.
+func (s *Selection) ClosestSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return pushStack(s, nil)
+ }
+ return s.ClosestNodes(sel.Nodes...)
+}
+
+// Parents gets the ancestors of each element in the current Selection. It
+// returns a new Selection object with the matched elements.
+func (s *Selection) Parents() *Selection {
+ return pushStack(s, getParentsNodes(s.Nodes, nil, nil))
+}
+
+// ParentsFiltered gets the ancestors of each element in the current
+// Selection. It returns a new Selection object with the matched elements.
+func (s *Selection) ParentsFiltered(selector string) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), compileMatcher(selector))
+}
+
+// ParentsMatcher gets the ancestors of each element in the current
+// Selection. It returns a new Selection object with the matched elements.
+func (s *Selection) ParentsMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, nil, nil), m)
+}
+
+// ParentsUntil gets the ancestors of each element in the Selection, up to but
+// not including the element matched by the selector. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) ParentsUntil(selector string) *Selection {
+ return pushStack(s, getParentsNodes(s.Nodes, compileMatcher(selector), nil))
+}
+
+// ParentsUntilMatcher gets the ancestors of each element in the Selection, up to but
+// not including the element matched by the matcher. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) ParentsUntilMatcher(m Matcher) *Selection {
+ return pushStack(s, getParentsNodes(s.Nodes, m, nil))
+}
+
+// ParentsUntilSelection gets the ancestors of each element in the Selection,
+// up to but not including the elements in the specified Selection. It returns a
+// new Selection object containing the matched elements.
+func (s *Selection) ParentsUntilSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return s.Parents()
+ }
+ return s.ParentsUntilNodes(sel.Nodes...)
+}
+
+// ParentsUntilNodes gets the ancestors of each element in the Selection,
+// up to but not including the specified nodes. It returns a
+// new Selection object containing the matched elements.
+func (s *Selection) ParentsUntilNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, getParentsNodes(s.Nodes, nil, nodes))
+}
+
+// ParentsFilteredUntil is like ParentsUntil, with the option to filter the
+// results based on a selector string. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) ParentsFilteredUntil(filterSelector, untilSelector string) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
+}
+
+// ParentsFilteredUntilMatcher is like ParentsUntilMatcher, with the option to filter the
+// results based on a matcher. It returns a new Selection object containing the matched elements.
+func (s *Selection) ParentsFilteredUntilMatcher(filter, until Matcher) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, until, nil), filter)
+}
+
+// ParentsFilteredUntilSelection is like ParentsUntilSelection, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) ParentsFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
+ return s.ParentsMatcherUntilSelection(compileMatcher(filterSelector), sel)
+}
+
+// ParentsMatcherUntilSelection is like ParentsUntilSelection, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) ParentsMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
+ if sel == nil {
+ return s.ParentsMatcher(filter)
+ }
+ return s.ParentsMatcherUntilNodes(filter, sel.Nodes...)
+}
+
+// ParentsFilteredUntilNodes is like ParentsUntilNodes, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) ParentsFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), compileMatcher(filterSelector))
+}
+
+// ParentsMatcherUntilNodes is like ParentsUntilNodes, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) ParentsMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getParentsNodes(s.Nodes, nil, nodes), filter)
+}
+
+// Siblings gets the siblings of each element in the Selection. It returns
+// a new Selection object containing the matched elements.
+func (s *Selection) Siblings() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil))
+}
+
+// SiblingsFiltered gets the siblings of each element in the Selection
+// filtered by a selector. It returns a new Selection object containing the
+// matched elements.
+func (s *Selection) SiblingsFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), compileMatcher(selector))
+}
+
+// SiblingsMatcher gets the siblings of each element in the Selection
+// filtered by a matcher. It returns a new Selection object containing the
+// matched elements.
+func (s *Selection) SiblingsMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingAll, nil, nil), m)
+}
+
+// Next gets the immediately following sibling of each element in the
+// Selection. It returns a new Selection object containing the matched elements.
+func (s *Selection) Next() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil))
+}
+
+// NextFiltered gets the immediately following sibling of each element in the
+// Selection filtered by a selector. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) NextFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), compileMatcher(selector))
+}
+
+// NextMatcher gets the immediately following sibling of each element in the
+// Selection filtered by a matcher. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) NextMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNext, nil, nil), m)
+}
+
+// NextAll gets all the following siblings of each element in the
+// Selection. It returns a new Selection object containing the matched elements.
+func (s *Selection) NextAll() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil))
+}
+
+// NextAllFiltered gets all the following siblings of each element in the
+// Selection filtered by a selector. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) NextAllFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), compileMatcher(selector))
+}
+
+// NextAllMatcher gets all the following siblings of each element in the
+// Selection filtered by a matcher. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) NextAllMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextAll, nil, nil), m)
+}
+
+// Prev gets the immediately preceding sibling of each element in the
+// Selection. It returns a new Selection object containing the matched elements.
+func (s *Selection) Prev() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil))
+}
+
+// PrevFiltered gets the immediately preceding sibling of each element in the
+// Selection filtered by a selector. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) PrevFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), compileMatcher(selector))
+}
+
+// PrevMatcher gets the immediately preceding sibling of each element in the
+// Selection filtered by a matcher. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) PrevMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrev, nil, nil), m)
+}
+
+// PrevAll gets all the preceding siblings of each element in the
+// Selection. It returns a new Selection object containing the matched elements.
+func (s *Selection) PrevAll() *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil))
+}
+
+// PrevAllFiltered gets all the preceding siblings of each element in the
+// Selection filtered by a selector. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) PrevAllFiltered(selector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), compileMatcher(selector))
+}
+
+// PrevAllMatcher gets all the preceding siblings of each element in the
+// Selection filtered by a matcher. It returns a new Selection object
+// containing the matched elements.
+func (s *Selection) PrevAllMatcher(m Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevAll, nil, nil), m)
+}
+
+// NextUntil gets all following siblings of each element up to but not
+// including the element matched by the selector. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) NextUntil(selector string) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ compileMatcher(selector), nil))
+}
+
+// NextUntilMatcher gets all following siblings of each element up to but not
+// including the element matched by the matcher. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) NextUntilMatcher(m Matcher) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ m, nil))
+}
+
+// NextUntilSelection gets all following siblings of each element up to but not
+// including the element matched by the Selection. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) NextUntilSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return s.NextAll()
+ }
+ return s.NextUntilNodes(sel.Nodes...)
+}
+
+// NextUntilNodes gets all following siblings of each element up to but not
+// including the element matched by the nodes. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) NextUntilNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ nil, nodes))
+}
+
+// PrevUntil gets all preceding siblings of each element up to but not
+// including the element matched by the selector. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) PrevUntil(selector string) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ compileMatcher(selector), nil))
+}
+
+// PrevUntilMatcher gets all preceding siblings of each element up to but not
+// including the element matched by the matcher. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) PrevUntilMatcher(m Matcher) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ m, nil))
+}
+
+// PrevUntilSelection gets all preceding siblings of each element up to but not
+// including the element matched by the Selection. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) PrevUntilSelection(sel *Selection) *Selection {
+ if sel == nil {
+ return s.PrevAll()
+ }
+ return s.PrevUntilNodes(sel.Nodes...)
+}
+
+// PrevUntilNodes gets all preceding siblings of each element up to but not
+// including the element matched by the nodes. It returns a new Selection
+// object containing the matched elements.
+func (s *Selection) PrevUntilNodes(nodes ...*html.Node) *Selection {
+ return pushStack(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ nil, nodes))
+}
+
+// NextFilteredUntil is like NextUntil, with the option to filter
+// the results based on a selector string.
+// It returns a new Selection object containing the matched elements.
+func (s *Selection) NextFilteredUntil(filterSelector, untilSelector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
+}
+
+// NextFilteredUntilMatcher is like NextUntilMatcher, with the option to filter
+// the results based on a matcher.
+// It returns a new Selection object containing the matched elements.
+func (s *Selection) NextFilteredUntilMatcher(filter, until Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ until, nil), filter)
+}
+
+// NextFilteredUntilSelection is like NextUntilSelection, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) NextFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
+ return s.NextMatcherUntilSelection(compileMatcher(filterSelector), sel)
+}
+
+// NextMatcherUntilSelection is like NextUntilSelection, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) NextMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
+ if sel == nil {
+ return s.NextMatcher(filter)
+ }
+ return s.NextMatcherUntilNodes(filter, sel.Nodes...)
+}
+
+// NextFilteredUntilNodes is like NextUntilNodes, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) NextFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ nil, nodes), compileMatcher(filterSelector))
+}
+
+// NextMatcherUntilNodes is like NextUntilNodes, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) NextMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingNextUntil,
+ nil, nodes), filter)
+}
+
+// PrevFilteredUntil is like PrevUntil, with the option to filter
+// the results based on a selector string.
+// It returns a new Selection object containing the matched elements.
+func (s *Selection) PrevFilteredUntil(filterSelector, untilSelector string) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ compileMatcher(untilSelector), nil), compileMatcher(filterSelector))
+}
+
+// PrevFilteredUntilMatcher is like PrevUntilMatcher, with the option to filter
+// the results based on a matcher.
+// It returns a new Selection object containing the matched elements.
+func (s *Selection) PrevFilteredUntilMatcher(filter, until Matcher) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ until, nil), filter)
+}
+
+// PrevFilteredUntilSelection is like PrevUntilSelection, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) PrevFilteredUntilSelection(filterSelector string, sel *Selection) *Selection {
+ return s.PrevMatcherUntilSelection(compileMatcher(filterSelector), sel)
+}
+
+// PrevMatcherUntilSelection is like PrevUntilSelection, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) PrevMatcherUntilSelection(filter Matcher, sel *Selection) *Selection {
+ if sel == nil {
+ return s.PrevMatcher(filter)
+ }
+ return s.PrevMatcherUntilNodes(filter, sel.Nodes...)
+}
+
+// PrevFilteredUntilNodes is like PrevUntilNodes, with the
+// option to filter the results based on a selector string. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) PrevFilteredUntilNodes(filterSelector string, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ nil, nodes), compileMatcher(filterSelector))
+}
+
+// PrevMatcherUntilNodes is like PrevUntilNodes, with the
+// option to filter the results based on a matcher. It returns a new
+// Selection object containing the matched elements.
+func (s *Selection) PrevMatcherUntilNodes(filter Matcher, nodes ...*html.Node) *Selection {
+ return filterAndPush(s, getSiblingNodes(s.Nodes, siblingPrevUntil,
+ nil, nodes), filter)
+}
+
+// Filter and push filters the nodes based on a matcher, and pushes the results
+// on the stack, with the srcSel as previous selection.
+func filterAndPush(srcSel *Selection, nodes []*html.Node, m Matcher) *Selection {
+ // Create a temporary Selection with the specified nodes to filter using winnow
+ sel := &Selection{nodes, srcSel.document, nil}
+ // Filter based on matcher and push on stack
+ return pushStack(srcSel, winnow(sel, m, true))
+}
+
// Internal implementation of Find that returns raw nodes.
// For each input node, the matcher is applied to the node's element children
// (and their descendants, via MatchAll), never to the node itself.
func findWithMatcher(nodes []*html.Node, m Matcher) []*html.Node {
	// Map nodes to find the matches within the children of each node
	return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
		// Go down one level, because jQuery's Find selects only within descendants
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			if c.Type == html.ElementNode {
				result = append(result, m.MatchAll(c)...)
			}
		}
		return
	})
}
+
+// Internal implementation to get all parent nodes, stopping at the specified
+// node (or nil if no stop).
+func getParentsNodes(nodes []*html.Node, stopm Matcher, stopNodes []*html.Node) []*html.Node {
+ return mapNodes(nodes, func(i int, n *html.Node) (result []*html.Node) {
+ for p := n.Parent; p != nil; p = p.Parent {
+ sel := newSingleSelection(p, nil)
+ if stopm != nil {
+ if sel.IsMatcher(stopm) {
+ break
+ }
+ } else if len(stopNodes) > 0 {
+ if sel.IsNodes(stopNodes...) {
+ break
+ }
+ }
+ if p.Type == html.ElementNode {
+ result = append(result, p)
+ }
+ }
+ return
+ })
+}
+
+// Internal implementation of sibling nodes that return a raw slice of matches.
+func getSiblingNodes(nodes []*html.Node, st siblingType, untilm Matcher, untilNodes []*html.Node) []*html.Node {
+ var f func(*html.Node) bool
+
+ // If the requested siblings are ...Until, create the test function to
+ // determine if the until condition is reached (returns true if it is)
+ if st == siblingNextUntil || st == siblingPrevUntil {
+ f = func(n *html.Node) bool {
+ if untilm != nil {
+ // Matcher-based condition
+ sel := newSingleSelection(n, nil)
+ return sel.IsMatcher(untilm)
+ } else if len(untilNodes) > 0 {
+ // Nodes-based condition
+ sel := newSingleSelection(n, nil)
+ return sel.IsNodes(untilNodes...)
+ }
+ return false
+ }
+ }
+
+ return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
+ return getChildrenWithSiblingType(n.Parent, st, n, f)
+ })
+}
+
+// Gets the children nodes of each node in the specified slice of nodes,
+// based on the sibling type request.
+func getChildrenNodes(nodes []*html.Node, st siblingType) []*html.Node {
+ return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
+ return getChildrenWithSiblingType(n, st, nil, nil)
+ })
+}
+
// Gets the children of the specified parent, based on the requested sibling
// type, skipping a specified node if required.
//
// Preconditions implied by the code below: skipNode must be non-nil for the
// siblingPrev*/siblingNext* types (its PrevSibling/NextSibling is read
// unconditionally), and untilFunc must be non-nil for the ...Until types
// (it is called without a nil check). Callers (getSiblingNodes,
// getChildrenNodes) satisfy these.
func getChildrenWithSiblingType(parent *html.Node, st siblingType, skipNode *html.Node,
	untilFunc func(*html.Node) bool) (result []*html.Node) {

	// Create the iterator function: given the current node (nil on the first
	// call), it returns the next candidate node in the requested direction,
	// skipping non-element nodes unless siblingAllIncludingNonElements.
	var iter = func(cur *html.Node) (ret *html.Node) {
		// Based on the sibling type requested, iterate the right way
		for {
			switch st {
			case siblingAll, siblingAllIncludingNonElements:
				if cur == nil {
					// First iteration, start with first child of parent
					// Skip node if required
					if ret = parent.FirstChild; ret == skipNode && skipNode != nil {
						ret = skipNode.NextSibling
					}
				} else {
					// Skip node if required
					if ret = cur.NextSibling; ret == skipNode && skipNode != nil {
						ret = skipNode.NextSibling
					}
				}
			case siblingPrev, siblingPrevAll, siblingPrevUntil:
				if cur == nil {
					// Start with previous sibling of the skip node
					ret = skipNode.PrevSibling
				} else {
					ret = cur.PrevSibling
				}
			case siblingNext, siblingNextAll, siblingNextUntil:
				if cur == nil {
					// Start with next sibling of the skip node
					ret = skipNode.NextSibling
				} else {
					ret = cur.NextSibling
				}
			default:
				panic("Invalid sibling type.")
			}
			if ret == nil || ret.Type == html.ElementNode || st == siblingAllIncludingNonElements {
				return
			}
			// Not a valid node, try again from this one
			cur = ret
		}
	}

	for c := iter(nil); c != nil; c = iter(c) {
		// If this is an ...Until case, test before append (returns true
		// if the until condition is reached)
		if st == siblingNextUntil || st == siblingPrevUntil {
			if untilFunc(c) {
				return
			}
		}
		result = append(result, c)
		if st == siblingNext || st == siblingPrev {
			// Only one node was requested (immediate next or previous), so exit
			return
		}
	}
	return
}
+
+// Internal implementation of parent nodes that return a raw slice of Nodes.
+func getParentNodes(nodes []*html.Node) []*html.Node {
+ return mapNodes(nodes, func(i int, n *html.Node) []*html.Node {
+ if n.Parent != nil && n.Parent.Type == html.ElementNode {
+ return []*html.Node{n.Parent}
+ }
+ return nil
+ })
+}
+
+// Internal map function used by many traversing methods. Takes the source nodes
+// to iterate on and the mapping function that returns an array of nodes.
+// Returns an array of nodes mapped by calling the callback function once for
+// each node in the source nodes.
+func mapNodes(nodes []*html.Node, f func(int, *html.Node) []*html.Node) (result []*html.Node) {
+ set := make(map[*html.Node]bool)
+ for i, n := range nodes {
+ if vals := f(i, n); len(vals) > 0 {
+ result = appendWithoutDuplicates(result, vals, set)
+ }
+ }
+ return result
+}
@@ -0,0 +1,203 @@
+package goquery
+
+import (
+ "errors"
+ "io"
+ "net/http"
+ "net/url"
+
+ "github.com/andybalholm/cascadia"
+ "golang.org/x/net/html"
+)
+
// Document represents an HTML document to be manipulated. Unlike jQuery, which
// is loaded as part of a DOM document, and thus acts upon its containing
// document, GoQuery doesn't know which HTML document to act upon. So it needs
// to be told, and that's what the Document class is for. It holds the root
// document node to manipulate, and can make selections on this document.
type Document struct {
	// The embedded Selection is rooted at the document node (set by
	// newDocument); Document therefore exposes all Selection methods.
	*Selection
	// Url is the URL the document was loaded from, or nil if it was built
	// from a node or a reader.
	Url *url.URL
	// rootNode is the parsed root html.Node, kept for cloning.
	rootNode *html.Node
}
+
// NewDocumentFromNode is a Document constructor that takes a root html Node
// as argument. The resulting Document has a nil Url.
func NewDocumentFromNode(root *html.Node) *Document {
	return newDocument(root, nil)
}
+
+// NewDocument is a Document constructor that takes a string URL as argument.
+// It loads the specified document, parses it, and stores the root Document
+// node, ready to be manipulated.
+//
+// Deprecated: Use the net/http standard library package to make the request
+// and validate the response before calling goquery.NewDocumentFromReader
+// with the response's body.
+func NewDocument(url string) (*Document, error) {
+ // Load the URL
+ res, e := http.Get(url)
+ if e != nil {
+ return nil, e
+ }
+ return NewDocumentFromResponse(res)
+}
+
+// NewDocumentFromReader returns a Document from an io.Reader.
+// It returns an error as second value if the reader's data cannot be parsed
+// as html. It does not check if the reader is also an io.Closer, the
+// provided reader is never closed by this call. It is the responsibility
+// of the caller to close it if required.
+func NewDocumentFromReader(r io.Reader) (*Document, error) {
+ root, e := html.Parse(r)
+ if e != nil {
+ return nil, e
+ }
+ return newDocument(root, nil), nil
+}
+
+// NewDocumentFromResponse is another Document constructor that takes an http response as argument.
+// It loads the specified response's document, parses it, and stores the root Document
+// node, ready to be manipulated. The response's body is closed on return.
+//
+// Deprecated: Use goquery.NewDocumentFromReader with the response's body.
+func NewDocumentFromResponse(res *http.Response) (*Document, error) {
+ if res == nil {
+ return nil, errors.New("Response is nil")
+ }
+ defer res.Body.Close()
+ if res.Request == nil {
+ return nil, errors.New("Response.Request is nil")
+ }
+
+ // Parse the HTML into nodes
+ root, e := html.Parse(res.Body)
+ if e != nil {
+ return nil, e
+ }
+
+ // Create and fill the document
+ return newDocument(root, res.Request.URL), nil
+}
+
+// CloneDocument creates a deep-clone of a document.
+func CloneDocument(doc *Document) *Document {
+ return newDocument(cloneNode(doc.rootNode), doc.Url)
+}
+
+// Private constructor, make sure all fields are correctly filled.
+func newDocument(root *html.Node, url *url.URL) *Document {
+ // Create and fill the document
+ d := &Document{nil, url, root}
+ d.Selection = newSingleSelection(root, d)
+ return d
+}
+
// Selection represents a collection of nodes matching some criteria. The
// initial Selection can be created by using Document.Find, and then
// manipulated using the jQuery-like chainable syntax and methods.
type Selection struct {
	// Nodes holds the matched nodes, in document order, without duplicates.
	Nodes []*html.Node
	// document is the owning Document (used to create derived selections).
	document *Document
	// prevSel is the previous Selection in the traversal chain (see
	// pushStack); nil for a freshly created selection.
	prevSel *Selection
}
+
+// Helper constructor to create an empty selection
+func newEmptySelection(doc *Document) *Selection {
+ return &Selection{nil, doc, nil}
+}
+
+// Helper constructor to create a selection of only one node
+func newSingleSelection(node *html.Node, doc *Document) *Selection {
+ return &Selection{[]*html.Node{node}, doc, nil}
+}
+
// Matcher is an interface that defines the methods to match
// HTML nodes against a compiled selector string. Cascadia's
// Selector implements this interface.
type Matcher interface {
	// Match reports whether the single node matches.
	Match(*html.Node) bool
	// MatchAll returns all nodes in the tree rooted at the argument
	// that match.
	MatchAll(*html.Node) []*html.Node
	// Filter returns the subset of the given nodes that match.
	Filter([]*html.Node) []*html.Node
}
+
+// Single compiles a selector string to a Matcher that stops after the first
+// match is found.
+//
+// By default, Selection.Find and other functions that accept a selector string
+// to select nodes will use all matches corresponding to that selector. By
+// using the Matcher returned by Single, at most the first match will be
+// selected.
+//
+// For example, those two statements are semantically equivalent:
+//
+// sel1 := doc.Find("a").First()
+// sel2 := doc.FindMatcher(goquery.Single("a"))
+//
+// The one using Single is optimized to be potentially much faster on large
+// documents.
+//
+// Only the behaviour of the MatchAll method of the Matcher interface is
+// altered compared to standard Matchers. This means that the single-selection
+// property of the Matcher only applies for Selection methods where the Matcher
+// is used to select nodes, not to filter or check if a node matches the
+// Matcher - in those cases, the behaviour of the Matcher is unchanged (e.g.
+// FilterMatcher(Single("div")) will still result in a Selection with multiple
+// "div"s if there were many "div"s in the Selection to begin with).
+func Single(selector string) Matcher {
+ return singleMatcher{compileMatcher(selector)}
+}
+
+// SingleMatcher returns a Matcher matches the same nodes as m, but that stops
+// after the first match is found.
+//
+// See the documentation of function Single for more details.
+func SingleMatcher(m Matcher) Matcher {
+ if _, ok := m.(singleMatcher); ok {
+ // m is already a singleMatcher
+ return m
+ }
+ return singleMatcher{m}
+}
+
+// compileMatcher compiles the selector string s and returns
+// the corresponding Matcher. If s is an invalid selector string,
+// it returns a Matcher that fails all matches.
+func compileMatcher(s string) Matcher {
+ cs, err := cascadia.Compile(s)
+ if err != nil {
+ return invalidMatcher{}
+ }
+ return cs
+}
+
+type singleMatcher struct {
+ Matcher
+}
+
+func (m singleMatcher) MatchAll(n *html.Node) []*html.Node {
+ // Optimized version - stops finding at the first match (cascadia-compiled
+ // matchers all use this code path).
+ if mm, ok := m.Matcher.(interface{ MatchFirst(*html.Node) *html.Node }); ok {
+ node := mm.MatchFirst(n)
+ if node == nil {
+ return nil
+ }
+ return []*html.Node{node}
+ }
+
+ // Fallback version, for e.g. test mocks that don't provide the MatchFirst
+ // method.
+ nodes := m.Matcher.MatchAll(n)
+ if len(nodes) > 0 {
+ return nodes[:1:1]
+ }
+ return nil
+}
+
+// invalidMatcher is a Matcher that always fails to match.
+type invalidMatcher struct{}
+
+func (invalidMatcher) Match(n *html.Node) bool { return false }
+func (invalidMatcher) MatchAll(n *html.Node) []*html.Node { return nil }
+func (invalidMatcher) Filter(ns []*html.Node) []*html.Node { return nil }
@@ -0,0 +1,178 @@
+package goquery
+
+import (
+ "bytes"
+ "io"
+
+ "golang.org/x/net/html"
+)
+
// minNodesForSet is used to determine if a set (map[*html.Node]bool) should
// be used instead of iterating over a slice. The set uses more memory and
// is slower than slice iteration for small N.
const minNodesForSet = 1000

// nodeNames maps html.NodeType values to the DOM-style names returned by
// NodeName. ElementNode and DoctypeNode are intentionally absent: for those,
// nodeName returns the node's Data instead.
var nodeNames = []string{
	html.ErrorNode: "#error",
	html.TextNode: "#text",
	html.DocumentNode: "#document",
	html.CommentNode: "#comment",
}
+
+// NodeName returns the node name of the first element in the selection.
+// It tries to behave in a similar way as the DOM's nodeName property
+// (https://developer.mozilla.org/en-US/docs/Web/API/Node/nodeName).
+//
+// Go's net/html package defines the following node types, listed with
+// the corresponding returned value from this function:
+//
+// ErrorNode : #error
+// TextNode : #text
+// DocumentNode : #document
+// ElementNode : the element's tag name
+// CommentNode : #comment
+// DoctypeNode : the name of the document type
+//
+func NodeName(s *Selection) string {
+ if s.Length() == 0 {
+ return ""
+ }
+ return nodeName(s.Get(0))
+}
+
+// nodeName returns the node name of the given html node.
+// See NodeName for additional details on behaviour.
+func nodeName(node *html.Node) string {
+ if node == nil {
+ return ""
+ }
+
+ switch node.Type {
+ case html.ElementNode, html.DoctypeNode:
+ return node.Data
+ default:
+ if int(node.Type) < len(nodeNames) {
+ return nodeNames[node.Type]
+ }
+ return ""
+ }
+}
+
+// Render renders the HTML of the first item in the selection and writes it to
+// the writer. It behaves the same as OuterHtml but writes to w instead of
+// returning the string.
+func Render(w io.Writer, s *Selection) error {
+ if s.Length() == 0 {
+ return nil
+ }
+ n := s.Get(0)
+ return html.Render(w, n)
+}
+
+// OuterHtml returns the outer HTML rendering of the first item in
+// the selection - that is, the HTML including the first element's
+// tag and attributes.
+//
+// Unlike Html, this is a function and not a method on the Selection,
+// because this is not a jQuery method (in javascript-land, this is
+// a property provided by the DOM).
+func OuterHtml(s *Selection) (string, error) {
+ var buf bytes.Buffer
+ if err := Render(&buf, s); err != nil {
+ return "", err
+ }
+ return buf.String(), nil
+}
+
+// Loop through all container nodes to search for the target node.
+func sliceContains(container []*html.Node, contained *html.Node) bool {
+ for _, n := range container {
+ if nodeContains(n, contained) {
+ return true
+ }
+ }
+
+ return false
+}
+
+// Checks if the contained node is within the container node.
+func nodeContains(container *html.Node, contained *html.Node) bool {
+ // Check if the parent of the contained node is the container node, traversing
+ // upward until the top is reached, or the container is found.
+ for contained = contained.Parent; contained != nil; contained = contained.Parent {
+ if container == contained {
+ return true
+ }
+ }
+ return false
+}
+
+// Checks if the target node is in the slice of nodes.
+func isInSlice(slice []*html.Node, node *html.Node) bool {
+ return indexInSlice(slice, node) > -1
+}
+
+// Returns the index of the target node in the slice, or -1.
+func indexInSlice(slice []*html.Node, node *html.Node) int {
+ if node != nil {
+ for i, n := range slice {
+ if n == node {
+ return i
+ }
+ }
+ }
+ return -1
+}
+
+// Appends the new nodes to the target slice, making sure no duplicate is added.
+// There is no check to the original state of the target slice, so it may still
+// contain duplicates. The target slice is returned because append() may create
+// a new underlying array. If targetSet is nil, a local set is created with the
+// target if len(target) + len(nodes) is greater than minNodesForSet.
+func appendWithoutDuplicates(target []*html.Node, nodes []*html.Node, targetSet map[*html.Node]bool) []*html.Node {
+ // if there are not that many nodes, don't use the map, faster to just use nested loops
+ // (unless a non-nil targetSet is passed, in which case the caller knows better).
+ if targetSet == nil && len(target)+len(nodes) < minNodesForSet {
+ for _, n := range nodes {
+ if !isInSlice(target, n) {
+ target = append(target, n)
+ }
+ }
+ return target
+ }
+
+ // if a targetSet is passed, then assume it is reliable, otherwise create one
+ // and initialize it with the current target contents.
+ if targetSet == nil {
+ targetSet = make(map[*html.Node]bool, len(target))
+ for _, n := range target {
+ targetSet[n] = true
+ }
+ }
+ for _, n := range nodes {
+ if !targetSet[n] {
+ target = append(target, n)
+ targetSet[n] = true
+ }
+ }
+
+ return target
+}
+
+// Loop through a selection, returning only those nodes that pass the predicate
+// function.
+func grep(sel *Selection, predicate func(i int, s *Selection) bool) (result []*html.Node) {
+ for i, n := range sel.Nodes {
+ if predicate(i, newSingleSelection(n, sel.document)) {
+ result = append(result, n)
+ }
+ }
+ return result
+}
+
+// Creates a new Selection object based on the specified nodes, and keeps the
+// source Selection object on the stack (linked list).
+func pushStack(fromSel *Selection, nodes []*html.Node) *Selection {
+ result := &Selection{nodes, fromSel.document, fromSel}
+ return result
+}
@@ -0,0 +1,17 @@
+root = true
+
+[*]
+indent_style = tab
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[*.xml]
+indent_style = space
+indent_size = 2
+insert_final_newline = false
+
+[*.yml]
+indent_style = space
+indent_size = 2
@@ -0,0 +1,25 @@
+# Binaries for programs and plugins
+.git
+.idea
+.vscode
+.hermit
+*.exe
+*.dll
+*.so
+*.dylib
+/cmd/chroma/chroma
+
+# Test binary, build with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736
+.glide/
+
+_models/
+
+_examples/
+*.min.*
+build/
@@ -0,0 +1,95 @@
+run:
+ tests: true
+ skip-dirs:
+ - _examples
+
+output:
+ print-issued-lines: false
+
+linters:
+ enable-all: true
+ disable:
+ - maligned
+ - megacheck
+ - lll
+ - gocyclo
+ - dupl
+ - gochecknoglobals
+ - funlen
+ - godox
+ - wsl
+ - gomnd
+ - gocognit
+ - goerr113
+ - nolintlint
+ - testpackage
+ - godot
+ - nestif
+ - paralleltest
+ - nlreturn
+ - cyclop
+ - exhaustivestruct
+ - gci
+ - gofumpt
+ - errorlint
+ - exhaustive
+ - ifshort
+ - wrapcheck
+ - stylecheck
+ - thelper
+ - nonamedreturns
+ - revive
+ - dupword
+ - exhaustruct
+ - varnamelen
+ - forcetypeassert
+ - ireturn
+ - maintidx
+ - govet
+ - nosnakecase
+ - testableexamples
+ - musttag
+ - depguard
+ - goconst
+ - perfsprint
+ - mnd
+ - predeclared
+
+linters-settings:
+ govet:
+ check-shadowing: true
+ gocyclo:
+ min-complexity: 10
+ dupl:
+ threshold: 100
+ goconst:
+ min-len: 8
+ min-occurrences: 3
+ forbidigo:
+ #forbid:
+ # - (Must)?NewLexer$
+ exclude_godoc_examples: false
+
+
+issues:
+ max-per-linter: 0
+ max-same: 0
+ exclude-use-default: false
+ exclude:
+ # Captured by errcheck.
+ - '^(G104|G204):'
+ # Very commonly not checked.
+ - 'Error return value of .(.*\.Help|.*\.MarkFlagRequired|(os\.)?std(out|err)\..*|.*Close|.*Flush|os\.Remove(All)?|.*printf?|os\.(Un)?Setenv). is not checked'
+ - 'exported method (.*\.MarshalJSON|.*\.UnmarshalJSON|.*\.EntityURN|.*\.GoString|.*\.Pos) should have comment or be unexported'
+ - 'composite literal uses unkeyed fields'
+ - 'declaration of "err" shadows declaration'
+ - 'should not use dot imports'
+ - 'Potential file inclusion via variable'
+ - 'should have comment or be unexported'
+ - 'comment on exported var .* should be of the form'
+ - 'at least one file in a package should have a package comment'
+ - 'string literal contains the Unicode'
+ - 'methods on the same type should have the same receiver name'
+ - '_TokenType_name should be _TokenTypeName'
+ - '`_TokenType_map` should be `_TokenTypeMap`'
+ - 'rewrite if-else to switch statement'
@@ -0,0 +1,37 @@
+project_name: chroma
+release:
+ github:
+ owner: alecthomas
+ name: chroma
+brews:
+ -
+ install: bin.install "chroma"
+env:
+ - CGO_ENABLED=0
+builds:
+- goos:
+ - linux
+ - darwin
+ - windows
+ goarch:
+ - arm64
+ - amd64
+ - "386"
+ goarm:
+ - "6"
+ dir: ./cmd/chroma
+ main: .
+ ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}}
+ binary: chroma
+archives:
+ -
+ format: tar.gz
+ name_template: '{{ .Binary }}-{{ .Version }}-{{ .Os }}-{{ .Arch }}{{ if .Arm }}v{{
+ .Arm }}{{ end }}'
+ files:
+ - COPYING
+ - README*
+snapshot:
+ name_template: SNAPSHOT-{{ .Commit }}
+checksum:
+ name_template: '{{ .ProjectName }}-{{ .Version }}-checksums.txt'
@@ -0,0 +1,24 @@
+VERSION = %(git describe --tags --dirty --always)%
+export CGOENABLED = 0
+
+tokentype_enumer.go: types.go
+ build: go generate
+
+# Regenerate the list of lexers in the README
+README.md: lexers/*.go lexers/*/*.xml table.py
+ build: ./table.py
+ -clean
+
+implicit %{1}%{2}.min.%{3}: **/*.{css,js}
+ build: esbuild --bundle %{IN} --minify --outfile=%{OUT}
+
+implicit build/%{1}: cmd/*
+ cd cmd/%{1}
+ inputs: cmd/%{1}/**/* **/*.go
+ build: go build -ldflags="-X 'main.version=%{VERSION}'" -o ../../build/%{1} .
+
+#upload: chromad
+# build:
+# scp chromad root@swapoff.org:
+# ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart'
+# touch upload
@@ -0,0 +1,19 @@
+Copyright (C) 2017 Alec Thomas
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -0,0 +1,23 @@
+.PHONY: chromad upload all
+
+# Version stamp embedded into the chromad binary via -ldflags below.
+VERSION ?= $(shell git describe --tags --dirty --always)
+# chromad is cross-compiled for its Linux/amd64 deployment host.
+export GOOS ?= linux
+export GOARCH ?= amd64
+
+all: README.md tokentype_string.go
+
+# Regenerate the lexer table in the README from the lexer sources.
+README.md: lexers/*/*.go
+	./table.py
+
+# Regenerate the TokenType stringer from types.go.
+tokentype_string.go: types.go
+	go generate
+
+# Build the playground server: minify static assets, then cross-compile.
+# NOTE(review): Go reads CGO_ENABLED (with underscore); CGOENABLED as written
+# here has no effect on the go build — confirm which was intended.
+chromad:
+	rm -rf build
+	esbuild --bundle cmd/chromad/static/index.js --minify --outfile=cmd/chromad/static/index.min.js
+	esbuild --bundle cmd/chromad/static/index.css --minify --outfile=cmd/chromad/static/index.min.css
+	(export CGOENABLED=0 ; cd ./cmd/chromad && go build -ldflags="-X 'main.version=$(VERSION)'" -o ../../build/chromad .)
+
+upload: build/chromad
+	scp build/chromad root@swapoff.org: && \
+	ssh root@swapoff.org 'install -m755 ./chromad /srv/http/swapoff.org/bin && service chromad restart'
@@ -0,0 +1,297 @@
+# Chroma — A general purpose syntax highlighter in pure Go
+
+[](https://godoc.org/github.com/alecthomas/chroma) [](https://github.com/alecthomas/chroma/actions/workflows/ci.yml) [](https://invite.slack.golangbridge.org/)
+
+Chroma takes source code and other structured text and converts it into syntax
+highlighted HTML, ANSI-coloured text, etc.
+
+Chroma is based heavily on [Pygments](http://pygments.org/), and includes
+translators for Pygments lexers and styles.
+
+## Table of Contents
+
+<!-- TOC -->
+
+1. [Supported languages](#supported-languages)
+2. [Try it](#try-it)
+3. [Using the library](#using-the-library)
+ 1. [Quick start](#quick-start)
+ 2. [Identifying the language](#identifying-the-language)
+ 3. [Formatting the output](#formatting-the-output)
+ 4. [The HTML formatter](#the-html-formatter)
+4. [More detail](#more-detail)
+ 1. [Lexers](#lexers)
+ 2. [Formatters](#formatters)
+ 3. [Styles](#styles)
+5. [Command-line interface](#command-line-interface)
+6. [Testing lexers](#testing-lexers)
+7. [What's missing compared to Pygments?](#whats-missing-compared-to-pygments)
+
+<!-- /TOC -->
+
+## Supported languages
+
+| Prefix | Language |
+| :----: | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| A | ABAP, ABNF, ActionScript, ActionScript 3, Ada, Agda, AL, Alloy, Angular2, ANTLR, ApacheConf, APL, AppleScript, ArangoDB AQL, Arduino, ArmAsm, AutoHotkey, AutoIt, Awk |
+| B | Ballerina, Bash, Bash Session, Batchfile, BibTeX, Bicep, BlitzBasic, BNF, BQN, Brainfuck |
+| C | C, C#, C++, Caddyfile, Caddyfile Directives, Cap'n Proto, Cassandra CQL, Ceylon, CFEngine3, cfstatement, ChaiScript, Chapel, Cheetah, Clojure, CMake, COBOL, CoffeeScript, Common Lisp, Coq, Crystal, CSS, Cython |
+| D | D, Dart, Dax, Desktop Entry, Diff, Django/Jinja, dns, Docker, DTD, Dylan |
+| E | EBNF, Elixir, Elm, EmacsLisp, Erlang |
+| F | Factor, Fennel, Fish, Forth, Fortran, FortranFixed, FSharp |
+| G | GAS, GDScript, Genshi, Genshi HTML, Genshi Text, Gherkin, Gleam, GLSL, Gnuplot, Go, Go HTML Template, Go Text Template, GraphQL, Groff, Groovy |
+| H | Handlebars, Hare, Haskell, Haxe, HCL, Hexdump, HLB, HLSL, HolyC, HTML, HTTP, Hy |
+| I | Idris, Igor, INI, Io, ISCdhcpd |
+| J | J, Java, JavaScript, JSON, Jsonnet, Julia, Jungle |
+| K | Kotlin |
+| L | Lighttpd configuration file, LLVM, Lua |
+| M | Makefile, Mako, markdown, Mason, Materialize SQL dialect, Mathematica, Matlab, MCFunction, Meson, Metal, MiniZinc, MLIR, Modula-2, MonkeyC, MorrowindScript, Myghty, MySQL |
+| N | NASM, Natural, Newspeak, Nginx configuration file, Nim, Nix, NSIS |
+| O | Objective-C, OCaml, Octave, Odin, OnesEnterprise, OpenEdge ABL, OpenSCAD, Org Mode |
+| P | PacmanConf, Perl, PHP, PHTML, Pig, PkgConfig, PL/pgSQL, plaintext, Plutus Core, Pony, PostgreSQL SQL dialect, PostScript, POVRay, PowerQuery, PowerShell, Prolog, PromQL, Promela, properties, Protocol Buffer, PRQL, PSL, Puppet, Python, Python 2 |
+| Q | QBasic, QML |
+| R | R, Racket, Ragel, Raku, react, ReasonML, reg, Rego, reStructuredText, Rexx, RPMSpec, Ruby, Rust |
+| S | SAS, Sass, Scala, Scheme, Scilab, SCSS, Sed, Sieve, Smali, Smalltalk, Smarty, SNBT, Snobol, Solidity, SourcePawn, SPARQL, SQL, SquidConf, Standard ML, stas, Stylus, Svelte, Swift, SYSTEMD, systemverilog |
+| T | TableGen, Tal, TASM, Tcl, Tcsh, Termcap, Terminfo, Terraform, TeX, Thrift, TOML, TradingView, Transact-SQL, Turing, Turtle, Twig, TypeScript, TypoScript, TypoScriptCssData, TypoScriptHtmlData, Typst |
+| V | V, V shell, Vala, VB.net, verilog, VHDL, VHS, VimL, vue |
+| W | WDTE, WebGPU Shading Language, Whiley |
+| X | XML, Xorg |
+| Y | YAML, YANG |
+| Z | Z80 Assembly, Zed, Zig |
+
+_I will attempt to keep this section up to date, but an authoritative list can be
+displayed with `chroma --list`._
+
+## Try it
+
+Try out various languages and styles on the [Chroma Playground](https://swapoff.org/chroma/playground/).
+
+## Using the library
+
+This is version 2 of Chroma, use the import path:
+
+```go
+import "github.com/alecthomas/chroma/v2"
+```
+
+Chroma, like Pygments, has the concepts of
+[lexers](https://github.com/alecthomas/chroma/tree/master/lexers),
+[formatters](https://github.com/alecthomas/chroma/tree/master/formatters) and
+[styles](https://github.com/alecthomas/chroma/tree/master/styles).
+
+Lexers convert source text into a stream of tokens, styles specify how token
+types are mapped to colours, and formatters convert tokens and styles into
+formatted output.
+
+A package exists for each of these, containing a global `Registry` variable
+with all of the registered implementations. There are also helper functions
+for using the registry in each package, such as looking up lexers by name or
+matching filenames, etc.
+
+In all cases, if a lexer, formatter or style can not be determined, `nil` will
+be returned. In this situation you may want to default to the `Fallback`
+value in each respective package, which provides sane defaults.
+
+### Quick start
+
+A convenience function exists that can be used to simply format some source
+text, without any effort:
+
+```go
+err := quick.Highlight(os.Stdout, someSourceCode, "go", "html", "monokai")
+```
+
+### Identifying the language
+
+To highlight code, you'll first have to identify what language the code is
+written in. There are three primary ways to do that:
+
+1. Detect the language from its filename.
+
+ ```go
+ lexer := lexers.Match("foo.go")
+ ```
+
+2. Explicitly specify the language by its Chroma syntax ID (a full list is available from `lexers.Names()`).
+
+ ```go
+ lexer := lexers.Get("go")
+ ```
+
+3. Detect the language from its content.
+
+ ```go
+ lexer := lexers.Analyse("package main\n\nfunc main()\n{\n}\n")
+ ```
+
+In all cases, `nil` will be returned if the language can not be identified.
+
+```go
+if lexer == nil {
+ lexer = lexers.Fallback
+}
+```
+
+At this point, it should be noted that some lexers can be extremely chatty. To
+mitigate this, you can use the coalescing lexer to coalesce runs of identical
+token types into a single token:
+
+```go
+lexer = chroma.Coalesce(lexer)
+```
+
+### Formatting the output
+
+Once a language is identified you will need to pick a formatter and a style (theme).
+
+```go
+style := styles.Get("swapoff")
+if style == nil {
+ style = styles.Fallback
+}
+formatter := formatters.Get("html")
+if formatter == nil {
+ formatter = formatters.Fallback
+}
+```
+
+Then obtain an iterator over the tokens:
+
+```go
+contents, err := ioutil.ReadAll(r)
+iterator, err := lexer.Tokenise(nil, string(contents))
+```
+
+And finally, format the tokens from the iterator:
+
+```go
+err := formatter.Format(w, style, iterator)
+```
+
+### The HTML formatter
+
+By default the `html` registered formatter generates standalone HTML with
+embedded CSS. More flexibility is available through the `formatters/html` package.
+
+Firstly, the output generated by the formatter can be customised with the
+following constructor options:
+
+- `Standalone()` - generate standalone HTML with embedded CSS.
+- `WithClasses()` - use classes rather than inlined style attributes.
+- `ClassPrefix(prefix)` - prefix each generated CSS class.
+- `TabWidth(width)` - Set the rendered tab width, in characters.
+- `WithLineNumbers()` - Render line numbers (style with `LineNumbers`).
+- `WithLinkableLineNumbers()` - Make the line numbers linkable and be a link to themselves.
+- `HighlightLines(ranges)` - Highlight lines in these ranges (style with `LineHighlight`).
+- `LineNumbersInTable()` - Use a table for formatting line numbers and code, rather than spans.
+
+If `WithClasses()` is used, the corresponding CSS can be obtained from the formatter with:
+
+```go
+formatter := html.New(html.WithClasses(true))
+err := formatter.WriteCSS(w, style)
+```
+
+## More detail
+
+### Lexers
+
+See the [Pygments documentation](http://pygments.org/docs/lexerdevelopment/)
+for details on implementing lexers. Most concepts apply directly to Chroma,
+but see existing lexer implementations for real examples.
+
+In many cases lexers can be automatically converted directly from Pygments by
+using the included Python 3 script `pygments2chroma_xml.py`. I use something like
+the following:
+
+```sh
+python3 _tools/pygments2chroma_xml.py \
+ pygments.lexers.jvm.KotlinLexer \
+ > lexers/embedded/kotlin.xml
+```
+
+See notes in [pygments-lexers.txt](https://github.com/alecthomas/chroma/blob/master/pygments-lexers.txt)
+for a list of lexers, and notes on some of the issues importing them.
+
+### Formatters
+
+Chroma supports HTML output, as well as terminal output in 8 colour, 256 colour, and true-colour.
+
+A `noop` formatter is included that outputs the token text only, and a `tokens`
+formatter outputs raw tokens. The latter is useful for debugging lexers.
+
+### Styles
+
+Chroma styles are defined in XML. The style entries use the
+[same syntax](http://pygments.org/docs/styles/) as Pygments.
+
+All Pygments styles have been converted to Chroma using the `_tools/style.py`
+script.
+
+When you work with one of [Chroma's styles](https://github.com/alecthomas/chroma/tree/master/styles),
+know that the `Background` token type provides the default style for tokens. It does so
+by defining a foreground color and background color.
+
+For example, this gives each token name not defined in the style a default color
+of `#f8f8f8` and uses `#000000` for the highlighted code block's background:
+
+```xml
+<entry type="Background" style="#f8f8f2 bg:#000000"/>
+```
+
+Also, token types in a style file are hierarchical. For instance, when `CommentSpecial` is not defined, Chroma uses the token style from `Comment`. So when several comment tokens use the same color, you'll only need to define `Comment` and override the one that has a different color.
+
+For a quick overview of the available styles and how they look, check out the [Chroma Style Gallery](https://xyproto.github.io/splash/docs/).
+
+## Command-line interface
+
+A command-line interface to Chroma is included.
+
+Binaries are available to install from [the releases page](https://github.com/alecthomas/chroma/releases).
+
+The CLI can be used as a preprocessor to colorise output of `less(1)`,
+see documentation for the `LESSOPEN` environment variable.
+
+The `--fail` flag can be used to suppress output and return with exit status
+1 to facilitate falling back to some other preprocessor in case chroma
+does not resolve a specific lexer to use for the given file. For example:
+
+```shell
+export LESSOPEN='| p() { chroma --fail "$1" || cat "$1"; }; p "%s"'
+```
+
+Replace `cat` with your favourite fallback preprocessor.
+
+When invoked as `.lessfilter`, the `--fail` flag is automatically turned
+on under the hood for easy integration with [lesspipe shipping with
+Debian and derivatives](https://manpages.debian.org/lesspipe#USER_DEFINED_FILTERS);
+for that setup the `chroma` executable can be just symlinked to `~/.lessfilter`.
+
+## Testing lexers
+
+If you edit some lexers and want to try it, open a shell in `cmd/chromad` and run:
+
+```shell
+go run . --csrf-key=securekey
+```
+
+A link will be printed. Open it in your browser. Now you can test your local changes on the Playground.
+
+If you want to run the tests for the lexers, open a shell in the root directory and run:
+
+```shell
+go test ./lexers
+```
+
+When updating or adding a lexer, please add tests. See [lexers/README.md](lexers/README.md) for more.
+
+## What's missing compared to Pygments?
+
+- Quite a few lexers, for various reasons (pull-requests welcome):
+ - Pygments lexers for complex languages often include custom code to
+ handle certain aspects, such as Raku's ability to nest code inside
+ regular expressions. These require time and effort to convert.
+ - I mostly only converted languages I had heard of, to reduce the porting cost.
+- Some more esoteric features of Pygments are omitted for simplicity.
+- Though the Chroma API supports content detection, very few lexers support it.
+ I have plans to implement a statistical analyser at some point, but not enough time.
@@ -0,0 +1,35 @@
+package chroma
+
// Coalesce is a Lexer interceptor that collapses runs of common types into a single token.
func Coalesce(lexer Lexer) Lexer { return &coalescer{lexer} }

// coalescer wraps a Lexer, merging adjacent tokens of the same type at
// Tokenise time. All other Lexer methods are inherited via embedding.
type coalescer struct{ Lexer }

// Tokenise returns an iterator yielding the underlying lexer's tokens with
// consecutive same-typed tokens concatenated into one.
//
// The closure keeps one token of lookbehind in prev: a run accumulates there
// until a token of a different type (or end of input) is seen, at which point
// the accumulated token is emitted.
func (d *coalescer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) {
	var prev Token
	it, err := d.Lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}
	return func() Token {
		for token := it(); token != (EOF); token = it() {
			// Empty tokens are dropped entirely.
			if len(token.Value) == 0 {
				continue
			}
			if prev == EOF {
				// First non-empty token: start a new run.
				prev = token
			} else {
				if prev.Type == token.Type && len(prev.Value) < 8192 {
					// Same type: extend the current run. The 8192 cap bounds
					// the size of any single merged token.
					prev.Value += token.Value
				} else {
					// Type changed (or run is large): emit the finished run
					// and start a new one with the current token.
					out := prev
					prev = token
					return out
				}
			}
		}
		// Underlying iterator exhausted: flush the pending run, then EOF.
		out := prev
		prev = EOF
		return out
	}, nil
}
@@ -0,0 +1,192 @@
+package chroma
+
+import (
+ "fmt"
+ "math"
+ "strconv"
+ "strings"
+)
+
// ANSI2RGB maps ANSI colour names, as supported by Chroma, to hex RGB values.
//
// Values are hex strings without a leading "#", in the form consumed by
// normaliseColour/ParseColour.
var ANSI2RGB = map[string]string{
	"#ansiblack":     "000000",
	"#ansidarkred":   "7f0000",
	"#ansidarkgreen": "007f00",
	// NOTE(review): "7f7fe0" has a strong blue channel for a colour named
	// "brown" (~7f7f00 would be expected) — confirm against upstream.
	"#ansibrown":     "7f7fe0",
	"#ansidarkblue":  "00007f",
	"#ansipurple":    "7f007f",
	"#ansiteal":      "007f7f",
	"#ansilightgray": "e5e5e5",
	// Normal
	"#ansidarkgray":  "555555",
	"#ansired":       "ff0000",
	"#ansigreen":     "00ff00",
	"#ansiyellow":    "ffff00",
	"#ansiblue":      "0000ff",
	"#ansifuchsia":   "ff00ff",
	"#ansiturquoise": "00ffff",
	"#ansiwhite":     "ffffff",

	// Aliases without the "ansi" prefix, kept for compatibility.
	"#black":     "000000",
	"#darkred":   "7f0000",
	"#darkgreen": "007f00",
	"#brown":     "7f7fe0",
	"#darkblue":  "00007f",
	"#purple":    "7f007f",
	"#teal":      "007f7f",
	"#lightgray": "e5e5e5",
	// Normal
	"#darkgray":  "555555",
	"#red":       "ff0000",
	"#green":     "00ff00",
	"#yellow":    "ffff00",
	"#blue":      "0000ff",
	"#fuchsia":   "ff00ff",
	"#turquoise": "00ffff",
	"#white":     "ffffff",
}
+
+// Colour represents an RGB colour.
+type Colour int32
+
+// NewColour creates a Colour directly from RGB values.
+func NewColour(r, g, b uint8) Colour {
+ return ParseColour(fmt.Sprintf("%02x%02x%02x", r, g, b))
+}
+
// Distance between this colour and another.
//
// This uses the approach described here (https://www.compuphase.com/cmetric.htm).
// This is not as accurate as LAB, et. al. but is *vastly* simpler and sufficient for our needs.
func (c Colour) Distance(e2 Colour) float64 {
	ar, ag, ab := int64(c.Red()), int64(c.Green()), int64(c.Blue())
	br, bg, bb := int64(e2.Red()), int64(e2.Green()), int64(e2.Blue())
	// "Redmean" weighting: the red and blue channel differences are scaled
	// by the mean red level, per the article above.
	rmean := (ar + br) / 2
	r := ar - br
	g := ag - bg
	b := ab - bb
	return math.Sqrt(float64((((512 + rmean) * r * r) >> 8) + 4*g*g + (((767 - rmean) * b * b) >> 8)))
}
+
// Brighten returns a copy of this colour with its brightness adjusted.
//
// If factor is negative, the colour is darkened.
//
// Uses approach described here (http://www.pvladov.com/2012/09/make-color-lighter-or-darker.html).
func (c Colour) Brighten(factor float64) Colour {
	r := float64(c.Red())
	g := float64(c.Green())
	b := float64(c.Blue())

	if factor < 0 {
		// Darken: scale each channel towards 0 by (1+factor).
		factor++
		r *= factor
		g *= factor
		b *= factor
	} else {
		// Lighten: move each channel a proportional step towards 255.
		r = (255-r)*factor + r
		g = (255-g)*factor + g
		b = (255-b)*factor + b
	}
	// NOTE(review): assumes factor is in [-1, 1]; values outside that range
	// can push a channel past 255 or below 0 and wrap in the uint8
	// conversion — confirm callers respect the range.
	return NewColour(uint8(r), uint8(g), uint8(b))
}
+
+// BrightenOrDarken brightens a colour if it is < 0.5 brightness or darkens if > 0.5 brightness.
+func (c Colour) BrightenOrDarken(factor float64) Colour {
+ if c.Brightness() < 0.5 {
+ return c.Brighten(factor)
+ }
+ return c.Brighten(-factor)
+}
+
// ClampBrightness returns a copy of this colour with its brightness adjusted such that
// it falls within the range [min, max] (or very close to it due to rounding errors).
// The supplied values use the same [0.0, 1.0] range as Brightness.
func (c Colour) ClampBrightness(min, max float64) Colour {
	if !c.IsSet() {
		// An unset colour has no meaningful channels to adjust.
		return c
	}

	// Clamp the requested range itself to [0, 1] before use.
	min = math.Max(min, 0)
	max = math.Min(max, 1)
	current := c.Brightness()
	target := math.Min(math.Max(current, min), max)
	if current == target {
		// Already within range; return unchanged.
		return c
	}

	r := float64(c.Red())
	g := float64(c.Green())
	b := float64(c.Blue())
	rgb := r + g + b
	if target > current {
		// Solve for x: target == ((255-r)*x + r + (255-g)*x + g + (255-b)*x + b) / 255 / 3
		return c.Brighten((target*255*3 - rgb) / (255*3 - rgb))
	}
	// Solve for x: target == (r*(x+1) + g*(x+1) + b*(x+1)) / 255 / 3
	return c.Brighten((target*255*3)/rgb - 1)
}
+
+// Brightness of the colour (roughly) in the range 0.0 to 1.0.
+func (c Colour) Brightness() float64 {
+ return (float64(c.Red()) + float64(c.Green()) + float64(c.Blue())) / 255.0 / 3.0
+}
+
+// ParseColour in the forms #rgb, #rrggbb, #ansi<colour>, or #<colour>.
+// Will return an "unset" colour if invalid.
+func ParseColour(colour string) Colour {
+ colour = normaliseColour(colour)
+ n, err := strconv.ParseUint(colour, 16, 32)
+ if err != nil {
+ return 0
+ }
+ return Colour(n + 1) //nolint:gosec
+}
+
+// MustParseColour is like ParseColour except it panics if the colour is invalid.
+//
+// Will panic if colour is in an invalid format.
+func MustParseColour(colour string) Colour {
+ parsed := ParseColour(colour)
+ if !parsed.IsSet() {
+ panic(fmt.Errorf("invalid colour %q", colour))
+ }
+ return parsed
+}
+
// IsSet returns true if the colour is set.
//
// Colour zero is the "unset" sentinel; parsed colours are stored offset by
// one so that black (#000000) is distinguishable from unset.
func (c Colour) IsSet() bool { return c != 0 }

// String returns the colour in "#rrggbb" form.
func (c Colour) String() string { return fmt.Sprintf("#%06x", int(c-1)) }

// GoString returns a Go-syntax representation of the colour.
func (c Colour) GoString() string { return fmt.Sprintf("Colour(0x%06x)", int(c-1)) }

// Red component of colour.
func (c Colour) Red() uint8 { return uint8(((c - 1) >> 16) & 0xff) } //nolint:gosec

// Green component of colour.
func (c Colour) Green() uint8 { return uint8(((c - 1) >> 8) & 0xff) } //nolint:gosec

// Blue component of colour.
func (c Colour) Blue() uint8 { return uint8((c - 1) & 0xff) } //nolint:gosec

// Colours is an orderable set of colours.
type Colours []Colour

// Len implements sort.Interface.
func (c Colours) Len() int { return len(c) }

// Swap implements sort.Interface.
func (c Colours) Swap(i, j int) { c[i], c[j] = c[j], c[i] }

// Less implements sort.Interface, ordering by raw colour value.
func (c Colours) Less(i, j int) bool { return c[i] < c[j] }
+
+// Convert colours to #rrggbb.
+func normaliseColour(colour string) string {
+ if ansi, ok := ANSI2RGB[colour]; ok {
+ return ansi
+ }
+ if strings.HasPrefix(colour, "#") {
+ colour = colour[1:]
+ if len(colour) == 3 {
+ return colour[0:1] + colour[0:1] + colour[1:2] + colour[1:2] + colour[2:3] + colour[2:3]
+ }
+ }
+ return colour
+}
@@ -0,0 +1,152 @@
+package chroma
+
+import (
+ "bytes"
+)
+
// delegatingLexer lexes with a language lexer first, then fills "Other"
// regions using a root lexer. See DelegatingLexer.
type delegatingLexer struct {
	root Lexer
	language Lexer
}

// DelegatingLexer combines two lexers to handle the common case of a language embedded inside another, such as PHP
// inside HTML or PHP inside plain text.
//
// It takes two lexers as arguments: a root lexer and a language lexer. First everything is scanned using the language
// lexer, which must return "Other" for unrecognised tokens. Then all "Other" tokens are lexed using the root lexer.
// Finally, these two sets of tokens are merged.
//
// The lexers from the template lexer package use this base lexer.
func DelegatingLexer(root Lexer, language Lexer) Lexer {
	return &delegatingLexer{
		root: root,
		language: language,
	}
}

// AnalyseText delegates content-detection scoring to the root lexer.
func (d *delegatingLexer) AnalyseText(text string) float32 {
	return d.root.AnalyseText(text)
}

// SetAnalyser installs a custom analyser on the root lexer only.
func (d *delegatingLexer) SetAnalyser(analyser func(text string) float32) Lexer {
	d.root.SetAnalyser(analyser)
	return d
}

// SetRegistry propagates the registry to both underlying lexers.
func (d *delegatingLexer) SetRegistry(r *LexerRegistry) Lexer {
	d.root.SetRegistry(r)
	d.language.SetRegistry(r)
	return d
}

// Config reports the language lexer's configuration.
func (d *delegatingLexer) Config() *Config {
	return d.language.Config()
}
+
// An insertion is the character range where language tokens should be inserted.
type insertion struct {
	start, end int
	tokens []Token
}

// Tokenise lexes text with the language lexer, re-lexes the "Other" regions
// with the root lexer, and splices the two token streams back together in
// source order.
func (d *delegatingLexer) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
	tokens, err := Tokenise(Coalesce(d.language), options, text)
	if err != nil {
		return nil, err
	}
	// Compute insertions and gather "Other" tokens.
	others := &bytes.Buffer{}
	insertions := []*insertion{}
	var insert *insertion
	offset := 0
	var last Token
	for _, t := range tokens {
		if t.Type == Other {
			// Leaving a language run closes the open insertion at this offset.
			if last != EOF && insert != nil && last.Type != Other {
				insert.end = offset
			}
			others.WriteString(t.Value)
		} else {
			// Entering a language run opens a new insertion.
			if last == EOF || last.Type == Other {
				insert = &insertion{start: offset}
				insertions = append(insertions, insert)
			}
			insert.tokens = append(insert.tokens, t)
		}
		last = t
		offset += len(t.Value)
	}

	// No embedded language content at all: the root lexer handles everything.
	if len(insertions) == 0 {
		return d.root.Tokenise(options, text)
	}

	// Lex the other tokens.
	rootTokens, err := Tokenise(Coalesce(d.root), options, others.String())
	if err != nil {
		return nil, err
	}

	// Interleave the two sets of tokens.
	var out []Token
	offset = 0 // Offset into text.
	tokenIndex := 0
	nextToken := func() Token {
		if tokenIndex >= len(rootTokens) {
			return EOF
		}
		t := rootTokens[tokenIndex]
		tokenIndex++
		return t
	}
	insertionIndex := 0
	nextInsertion := func() *insertion {
		if insertionIndex >= len(insertions) {
			return nil
		}
		i := insertions[insertionIndex]
		insertionIndex++
		return i
	}
	t := nextToken()
	i := nextInsertion()
	for t != EOF || i != nil {
		// fmt.Printf("%d->%d:%q %d->%d:%q\n", offset, offset+len(t.Value), t.Value, i.start, i.end, Stringify(i.tokens...))
		if t == EOF || (i != nil && i.start < offset+len(t.Value)) {
			// The next insertion lands inside (or before) the current root
			// token: split the root token around it and emit the insertion.
			var l Token
			l, t = splitToken(t, i.start-offset)
			if l != EOF {
				out = append(out, l)
				offset += len(l.Value)
			}
			out = append(out, i.tokens...)
			offset += i.end - i.start
			if t == EOF {
				t = nextToken()
			}
			i = nextInsertion()
		} else {
			// Root token lies entirely before the next insertion.
			out = append(out, t)
			offset += len(t.Value)
			t = nextToken()
		}
	}
	return Literator(out...), nil
}
+
+func splitToken(t Token, offset int) (l Token, r Token) {
+ if t == EOF {
+ return EOF, EOF
+ }
+ if offset == 0 {
+ return EOF, t
+ }
+ if offset == len(t.Value) {
+ return t, EOF
+ }
+ l = t.Clone()
+ r = t.Clone()
+ l.Value = l.Value[:offset]
+ r.Value = r.Value[offset:]
+ return
+}
@@ -0,0 +1,7 @@
+// Package chroma takes source code and other structured text and converts it into syntax highlighted HTML, ANSI-
+// coloured text, etc.
+//
+// Chroma is based heavily on Pygments, and includes translators for Pygments lexers and styles.
+//
+// For more information, go here: https://github.com/alecthomas/chroma
+package chroma
@@ -0,0 +1,218 @@
+package chroma
+
+import (
+ "fmt"
+)
+
// An Emitter takes group matches and returns tokens.
type Emitter interface {
	// Emit tokens for the given regex groups.
	Emit(groups []string, state *LexerState) Iterator
}

// SerialisableEmitter is an Emitter that can be serialised and deserialised to/from JSON.
type SerialisableEmitter interface {
	Emitter
	// EmitterKind returns the serialisation tag identifying this emitter type.
	EmitterKind() string
}

// EmitterFunc is a function that is an Emitter.
type EmitterFunc func(groups []string, state *LexerState) Iterator

// Emit tokens for groups.
func (e EmitterFunc) Emit(groups []string, state *LexerState) Iterator {
	return e(groups, state)
}
+
// Emitters is a sequence of Emitters.
type Emitters []Emitter

// byGroupsEmitter emits one token stream per regex capture group.
type byGroupsEmitter struct {
	Emitters
}

// ByGroups emits a token for each matching group in the rule's regex.
func ByGroups(emitters ...Emitter) Emitter {
	return &byGroupsEmitter{Emitters: emitters}
}

// EmitterKind returns the serialisation tag for this emitter.
func (b *byGroupsEmitter) EmitterKind() string { return "bygroups" }

// Emit emits each capture group (groups[1:]) through its corresponding
// emitter. If the emitter count does not match the group count, the whole
// match is emitted as an Error token rather than panicking.
func (b *byGroupsEmitter) Emit(groups []string, state *LexerState) Iterator {
	iterators := make([]Iterator, 0, len(groups)-1)
	if len(b.Emitters) != len(groups)-1 {
		iterators = append(iterators, Error.Emit(groups, state))
		// panic(errors.Errorf("number of groups %q does not match number of emitters %v", groups, emitters))
	} else {
		for i, group := range groups[1:] {
			// A nil emitter drops its group's text from the output.
			if b.Emitters[i] != nil {
				iterators = append(iterators, b.Emitters[i].Emit([]string{group}, state))
			}
		}
	}
	return Concaterator(iterators...)
}
+
// ByGroupNames emits a token for each named matching group in the rule's regex.
//
// Groups with no entry in emitters are emitted as Error tokens; a nil entry
// drops its group's text.
func ByGroupNames(emitters map[string]Emitter) Emitter {
	return EmitterFunc(func(groups []string, state *LexerState) Iterator {
		iterators := make([]Iterator, 0, len(state.NamedGroups)-1)
		if len(state.NamedGroups)-1 == 0 {
			// No named groups: fall back to a whole-match emitter keyed "0",
			// or an Error token if none is registered.
			if emitter, ok := emitters[`0`]; ok {
				iterators = append(iterators, emitter.Emit(groups, state))
			} else {
				iterators = append(iterators, Error.Emit(groups, state))
			}
		} else {
			// Resolve each numbered group back to its name via the rule's
			// regex, then dispatch to the matching emitter.
			ruleRegex := state.Rules[state.State][state.Rule].Regexp
			for i := 1; i < len(state.NamedGroups); i++ {
				groupName := ruleRegex.GroupNameFromNumber(i)
				group := state.NamedGroups[groupName]
				if emitter, ok := emitters[groupName]; ok {
					if emitter != nil {
						iterators = append(iterators, emitter.Emit([]string{group}, state))
					}
				} else {
					iterators = append(iterators, Error.Emit([]string{group}, state))
				}
			}
		}
		return Concaterator(iterators...)
	})
}
+
// UsingByGroup emits tokens for the matched groups in the regex using a
// sublexer. Used when lexing code blocks where the name of a sublexer is
// contained within the block, for example on a Markdown text block or SQL
// language block.
//
// An attempt to load the sublexer will be made using the captured value from
// the text of the matched sublexerNameGroup. If a sublexer matching the
// sublexerNameGroup is available, then tokens for the matched codeGroup will
// be emitted using the sublexer. Otherwise, if no sublexer is available, then
// tokens will be emitted from the passed emitter.
//
// Example:
//
//	var Markdown = internal.Register(MustNewLexer(
//		&Config{
//			Name:      "markdown",
//			Aliases:   []string{"md", "mkd"},
//			Filenames: []string{"*.md", "*.mkd", "*.markdown"},
//			MimeTypes: []string{"text/x-markdown"},
//		},
//		Rules{
//			"root": {
//				{"^(```)(\\w+)(\\n)([\\w\\W]*?)(^```$)",
//					UsingByGroup(
//						2, 4,
//						String, String, String, Text, String,
//					),
//					nil,
//				},
//			},
//		},
//	))
//
// See the lexers/markdown.go for the complete example.
//
// Note: panics if the number of emitters does not equal the number of matched
// groups in the regex.
func UsingByGroup(sublexerNameGroup, codeGroup int, emitters ...Emitter) Emitter {
	return &usingByGroup{
		SublexerNameGroup: sublexerNameGroup,
		CodeGroup:         codeGroup,
		Emitters:          emitters,
	}
}

// usingByGroup is the serialisable implementation behind UsingByGroup.
type usingByGroup struct {
	// SublexerNameGroup is the index of the capture group whose text names the sublexer.
	SublexerNameGroup int `xml:"sublexer_name_group"`
	// CodeGroup is the index of the capture group containing the code to sublex.
	CodeGroup int `xml:"code_group"`
	// Emitters are the fallback emitters, one per capture group.
	Emitters Emitters `xml:"emitters"`
}
+
// EmitterKind returns the serialisation tag for this emitter.
func (u *usingByGroup) EmitterKind() string { return "usingbygroup" }

// Emit tokenises the code group with the sublexer named by the sublexer-name
// group, falling back to the configured emitter for that group when no such
// sublexer exists. Panics on an emitter/group count mismatch or a sublexer
// tokenise failure.
func (u *usingByGroup) Emit(groups []string, state *LexerState) Iterator {
	// bounds check
	if len(u.Emitters) != len(groups)-1 {
		panic("UsingByGroup expects number of emitters to be the same as len(groups)-1")
	}

	// grab sublexer; nil when the captured name is unknown to the registry
	sublexer := state.Registry.Get(groups[u.SublexerNameGroup])

	// build one iterator per capture group
	iterators := make([]Iterator, len(groups)-1)
	for i, group := range groups[1:] {
		if i == u.CodeGroup-1 && sublexer != nil {
			// The code group is tokenised by the sublexer instead of its emitter.
			var err error
			iterators[i], err = sublexer.Tokenise(nil, groups[u.CodeGroup])
			if err != nil {
				panic(err)
			}
		} else if u.Emitters[i] != nil {
			iterators[i] = u.Emitters[i].Emit([]string{group}, state)
		}
	}
	return Concaterator(iterators...)
}
+
+// UsingLexer returns an Emitter that uses a given Lexer for parsing and emitting.
+//
+// This Emitter is not serialisable.
+func UsingLexer(lexer Lexer) Emitter {
+ return EmitterFunc(func(groups []string, _ *LexerState) Iterator {
+ it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0])
+ if err != nil {
+ panic(err)
+ }
+ return it
+ })
+}
+
// usingEmitter emits by delegating to a lexer looked up by name in the
// registry at Emit time.
type usingEmitter struct {
	Lexer string `xml:"lexer,attr"`
}

// EmitterKind returns the serialisation tag for this emitter.
func (u *usingEmitter) EmitterKind() string { return "using" }

// Emit tokenises the whole match (groups[0]) with the referenced lexer.
// Panics if no registry is available or the lexer name is unknown.
func (u *usingEmitter) Emit(groups []string, state *LexerState) Iterator {
	if state.Registry == nil {
		panic(fmt.Sprintf("no LexerRegistry available for Using(%q)", u.Lexer))
	}
	lexer := state.Registry.Get(u.Lexer)
	if lexer == nil {
		panic(fmt.Sprintf("no such lexer %q", u.Lexer))
	}
	it, err := lexer.Tokenise(&TokeniseOptions{State: "root", Nested: true}, groups[0])
	if err != nil {
		panic(err)
	}
	return it
}

// Using returns an Emitter that uses a given Lexer reference for parsing and emitting.
//
// The referenced lexer must be stored in the same LexerRegistry.
func Using(lexer string) Emitter {
	return &usingEmitter{Lexer: lexer}
}
+
+type usingSelfEmitter struct {
+ State string `xml:"state,attr"`
+}
+
+func (u *usingSelfEmitter) EmitterKind() string { return "usingself" }
+
+func (u *usingSelfEmitter) Emit(groups []string, state *LexerState) Iterator {
+ it, err := state.Lexer.Tokenise(&TokeniseOptions{State: u.State, Nested: true}, groups[0])
+ if err != nil {
+ panic(err)
+ }
+ return it
+}
+
+// UsingSelf is like Using, but uses the current Lexer.
+func UsingSelf(stateName string) Emitter {
+ return &usingSelfEmitter{stateName}
+}
@@ -0,0 +1,43 @@
+package chroma
+
import (
	"fmt"
	"io"
)
+
// A Formatter for Chroma lexers.
//
// Implementations render the token stream from iterator to w using style.
type Formatter interface {
	// Format returns a formatting function for tokens.
	//
	// If the iterator panics, the Formatter should recover.
	Format(w io.Writer, style *Style, iterator Iterator) error
}
+
+// A FormatterFunc is a Formatter implemented as a function.
+//
+// Guards against iterator panics.
+type FormatterFunc func(w io.Writer, style *Style, iterator Iterator) error
+
+func (f FormatterFunc) Format(w io.Writer, s *Style, it Iterator) (err error) { // nolint
+ defer func() {
+ if perr := recover(); perr != nil {
+ err = perr.(error)
+ }
+ }()
+ return f(w, s, it)
+}
+
+type recoveringFormatter struct {
+ Formatter
+}
+
+func (r recoveringFormatter) Format(w io.Writer, s *Style, it Iterator) (err error) {
+ defer func() {
+ if perr := recover(); perr != nil {
+ err = perr.(error)
+ }
+ }()
+ return r.Formatter.Format(w, s, it)
+}
+
+// RecoveringFormatter wraps a formatter with panic recovery.
+func RecoveringFormatter(formatter Formatter) Formatter { return recoveringFormatter{formatter} }
@@ -0,0 +1,57 @@
+package formatters
+
+import (
+ "io"
+ "sort"
+
+ "github.com/alecthomas/chroma/v2"
+ "github.com/alecthomas/chroma/v2/formatters/html"
+ "github.com/alecthomas/chroma/v2/formatters/svg"
+)
+
var (
	// NoOp formatter: writes each token's text verbatim with no markup.
	NoOp = Register("noop", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, iterator chroma.Iterator) error {
		for t := iterator(); t != chroma.EOF; t = iterator() {
			if _, err := io.WriteString(w, t.Value); err != nil {
				return err
			}
		}
		return nil
	}))
	// Default HTML formatter outputs self-contained HTML.
	htmlFull = Register("html", html.New(html.Standalone(true), html.WithClasses(true))) // nolint
	// SVG formatter with Liberation Mono embedded as a WOFF font.
	SVG = Register("svg", svg.New(svg.EmbedFont("Liberation Mono", svg.FontLiberationMono, svg.WOFF)))
)
+
// Fallback formatter, returned by Get when the requested name is unknown.
var Fallback = NoOp

// Registry of Formatters, keyed by the name passed to Register.
var Registry = map[string]chroma.Formatter{}
+
+// Names of registered formatters.
+func Names() []string {
+ out := []string{}
+ for name := range Registry {
+ out = append(out, name)
+ }
+ sort.Strings(out)
+ return out
+}
+
+// Get formatter by name.
+//
+// If the given formatter is not found, the Fallback formatter will be returned.
+func Get(name string) chroma.Formatter {
+ if f, ok := Registry[name]; ok {
+ return f
+ }
+ return Fallback
+}
+
+// Register a named formatter.
+func Register(name string, formatter chroma.Formatter) chroma.Formatter {
+ Registry[name] = formatter
+ return formatter
+}
@@ -0,0 +1,623 @@
+package html
+
+import (
+ "fmt"
+ "html"
+ "io"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
// Option sets an option of the HTML formatter.
// Options are applied in order by New.
type Option func(f *Formatter)
+
+// Standalone configures the HTML formatter for generating a standalone HTML document.
+func Standalone(b bool) Option { return func(f *Formatter) { f.standalone = b } }
+
+// ClassPrefix sets the CSS class prefix.
+func ClassPrefix(prefix string) Option { return func(f *Formatter) { f.prefix = prefix } }
+
+// WithClasses emits HTML using CSS classes, rather than inline styles.
+func WithClasses(b bool) Option { return func(f *Formatter) { f.Classes = b } }
+
+// WithAllClasses disables an optimisation that omits redundant CSS classes.
+func WithAllClasses(b bool) Option { return func(f *Formatter) { f.allClasses = b } }
+
+// WithCustomCSS sets user's custom CSS styles.
+func WithCustomCSS(css map[chroma.TokenType]string) Option {
+ return func(f *Formatter) {
+ f.customCSS = css
+ }
+}
+
+// TabWidth sets the number of characters for a tab. Defaults to 8.
+func TabWidth(width int) Option { return func(f *Formatter) { f.tabWidth = width } }
+
+// PreventSurroundingPre prevents the surrounding pre tags around the generated code.
+func PreventSurroundingPre(b bool) Option {
+ return func(f *Formatter) {
+ f.preventSurroundingPre = b
+
+ if b {
+ f.preWrapper = nopPreWrapper
+ } else {
+ f.preWrapper = defaultPreWrapper
+ }
+ }
+}
+
+// InlineCode creates inline code wrapped in a code tag.
+func InlineCode(b bool) Option {
+ return func(f *Formatter) {
+ f.inlineCode = b
+ f.preWrapper = preWrapper{
+ start: func(code bool, styleAttr string) string {
+ if code {
+ return fmt.Sprintf(`<code%s>`, styleAttr)
+ }
+
+ return ``
+ },
+ end: func(code bool) string {
+ if code {
+ return `</code>`
+ }
+
+ return ``
+ },
+ }
+ }
+}
+
+// WithPreWrapper allows control of the surrounding pre tags.
+func WithPreWrapper(wrapper PreWrapper) Option {
+ return func(f *Formatter) {
+ f.preWrapper = wrapper
+ }
+}
+
+// WrapLongLines wraps long lines.
+func WrapLongLines(b bool) Option {
+ return func(f *Formatter) {
+ f.wrapLongLines = b
+ }
+}
+
+// WithLineNumbers formats output with line numbers.
+func WithLineNumbers(b bool) Option {
+ return func(f *Formatter) {
+ f.lineNumbers = b
+ }
+}
+
+// LineNumbersInTable will, when combined with WithLineNumbers, separate the line numbers
+// and code in table td's, which make them copy-and-paste friendly.
+func LineNumbersInTable(b bool) Option {
+ return func(f *Formatter) {
+ f.lineNumbersInTable = b
+ }
+}
+
+// WithLinkableLineNumbers decorates the line numbers HTML elements with an "id"
+// attribute so they can be linked.
+func WithLinkableLineNumbers(b bool, prefix string) Option {
+ return func(f *Formatter) {
+ f.linkableLineNumbers = b
+ f.lineNumbersIDPrefix = prefix
+ }
+}
+
+// HighlightLines higlights the given line ranges with the Highlight style.
+//
+// A range is the beginning and ending of a range as 1-based line numbers, inclusive.
+func HighlightLines(ranges [][2]int) Option {
+ return func(f *Formatter) {
+ f.highlightRanges = ranges
+ sort.Sort(f.highlightRanges)
+ }
+}
+
+// BaseLineNumber sets the initial number to start line numbering at. Defaults to 1.
+func BaseLineNumber(n int) Option {
+ return func(f *Formatter) {
+ f.baseLineNumber = n
+ }
+}
+
// New HTML formatter.
func New(options ...Option) *Formatter {
	f := &Formatter{
		baseLineNumber: 1,
		preWrapper: defaultPreWrapper,
	}
	// The style cache must exist before options run so the formatter is
	// fully usable regardless of option order.
	f.styleCache = newStyleCache(f)
	for _, option := range options {
		option(f)
	}
	return f
}
+
// PreWrapper defines the operations supported in WithPreWrapper.
type PreWrapper interface {
	// Start is called to write a start <pre> element.
	// The code flag tells whether this block surrounds
	// highlighted code. This will be false when surrounding
	// line numbers.
	Start(code bool, styleAttr string) string

	// End is called to write the end </pre> element.
	End(code bool) string
}
+
// preWrapper implements PreWrapper via a pair of function fields.
type preWrapper struct {
	start func(code bool, styleAttr string) string
	end func(code bool) string
}

// Start implements PreWrapper.
func (p preWrapper) Start(code bool, styleAttr string) string {
	return p.start(code, styleAttr)
}

// End implements PreWrapper.
func (p preWrapper) End(code bool) string {
	return p.end(code)
}
+
var (
	// nopPreWrapper emits nothing around the output.
	nopPreWrapper = preWrapper{
		start: func(code bool, styleAttr string) string { return "" },
		end:   func(code bool) string { return "" },
	}
	// defaultPreWrapper wraps code in <pre><code>…</code></pre>, and the
	// non-code line-number column in a bare <pre>.
	defaultPreWrapper = preWrapper{
		start: func(code bool, styleAttr string) string {
			if code {
				return fmt.Sprintf(`<pre%s><code>`, styleAttr)
			}

			return fmt.Sprintf(`<pre%s>`, styleAttr)
		},
		end: func(code bool) string {
			if code {
				return `</code></pre>`
			}

			return `</pre>`
		},
	}
)
+
// Formatter that generates HTML.
type Formatter struct {
	styleCache *styleCache
	standalone bool
	prefix     string
	Classes    bool // Exported field to detect when classes are being used
	allClasses bool
	customCSS  map[chroma.TokenType]string
	preWrapper PreWrapper
	inlineCode bool
	preventSurroundingPre bool
	tabWidth              int
	wrapLongLines         bool
	lineNumbers           bool
	lineNumbersInTable    bool
	linkableLineNumbers   bool
	lineNumbersIDPrefix   string
	highlightRanges       highlightRanges // sorted by start line (see HighlightLines)
	baseLineNumber        int
}
+
// highlightRanges is a list of inclusive 1-based [start, end] line ranges,
// implementing sort.Interface ordered by start line so shouldHighlight can
// scan them with a forward cursor.
type highlightRanges [][2]int

func (h highlightRanges) Len() int           { return len(h) }
func (h highlightRanges) Swap(i, j int)      { h[i], h[j] = h[j], h[i] }
func (h highlightRanges) Less(i, j int) bool { return h[i][0] < h[j][0] }
+
+func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) {
+ return f.writeHTML(w, style, iterator.Tokens())
+}
+
// We deliberately don't use html/template here because it is two orders of magnitude slower (benchmarked).
//
// OTOH we need to be super careful about correct escaping...
func (f *Formatter) writeHTML(w io.Writer, style *chroma.Style, tokens []chroma.Token) (err error) { // nolint: gocyclo
	css := f.styleCache.get(style, true)
	if f.standalone {
		fmt.Fprint(w, "<html>\n")
		if f.Classes {
			fmt.Fprint(w, "<style type=\"text/css\">\n")
			err = f.WriteCSS(w, style)
			if err != nil {
				return err
			}
			fmt.Fprintf(w, "body { %s; }\n", css[chroma.Background])
			fmt.Fprint(w, "</style>")
		}
		fmt.Fprintf(w, "<body%s>\n", f.styleAttr(css, chroma.Background))
	}

	// A separate line-number column requires a surrounding table.
	wrapInTable := f.lineNumbers && f.lineNumbersInTable

	lines := chroma.SplitTokensIntoLines(tokens)
	// Digit width of the largest line number, for right-aligned padding.
	lineDigits := len(strconv.Itoa(f.baseLineNumber + len(lines) - 1))
	highlightIndex := 0

	if wrapInTable {
		// List line numbers in its own <td>
		fmt.Fprintf(w, "<div%s>\n", f.styleAttr(css, chroma.PreWrapper))
		fmt.Fprintf(w, "<table%s><tr>", f.styleAttr(css, chroma.LineTable))
		fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD))
		fmt.Fprintf(w, "%s", f.preWrapper.Start(false, f.styleAttr(css, chroma.PreWrapper)))
		for index := range lines {
			line := f.baseLineNumber + index
			highlight, next := f.shouldHighlight(highlightIndex, line)
			if next {
				highlightIndex++
			}
			if highlight {
				fmt.Fprintf(w, "<span%s>", f.styleAttr(css, chroma.LineHighlight))
			}

			fmt.Fprintf(w, "<span%s%s>%s\n</span>", f.styleAttr(css, chroma.LineNumbersTable), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(css, lineDigits, line))

			if highlight {
				fmt.Fprintf(w, "</span>")
			}
		}
		fmt.Fprint(w, f.preWrapper.End(false))
		fmt.Fprint(w, "</td>\n")
		fmt.Fprintf(w, "<td%s>\n", f.styleAttr(css, chroma.LineTableTD, "width:100%"))
	}

	fmt.Fprintf(w, "%s", f.preWrapper.Start(true, f.styleAttr(css, chroma.PreWrapper)))

	// Second pass over the lines emits the code itself; the highlight
	// cursor restarts from the beginning.
	highlightIndex = 0
	for index, tokens := range lines {
		// 1-based line number.
		line := f.baseLineNumber + index
		highlight, next := f.shouldHighlight(highlightIndex, line)
		if next {
			highlightIndex++
		}

		if !(f.preventSurroundingPre || f.inlineCode) {
			// Start of Line
			fmt.Fprint(w, `<span`)

			if highlight {
				// Line + LineHighlight
				if f.Classes {
					fmt.Fprintf(w, ` class="%s %s"`, f.class(chroma.Line), f.class(chroma.LineHighlight))
				} else {
					fmt.Fprintf(w, ` style="%s %s"`, css[chroma.Line], css[chroma.LineHighlight])
				}
				fmt.Fprint(w, `>`)
			} else {
				fmt.Fprintf(w, "%s>", f.styleAttr(css, chroma.Line))
			}

			// Line number
			if f.lineNumbers && !wrapInTable {
				fmt.Fprintf(w, "<span%s%s>%s</span>", f.styleAttr(css, chroma.LineNumbers), f.lineIDAttribute(line), f.lineTitleWithLinkIfNeeded(css, lineDigits, line))
			}

			fmt.Fprintf(w, `<span%s>`, f.styleAttr(css, chroma.CodeLine))
		}

		for _, token := range tokens {
			// Note: the local "html" shadows the html package import here.
			html := html.EscapeString(token.String())
			attr := f.styleAttr(css, token.Type)
			if attr != "" {
				html = fmt.Sprintf("<span%s>%s</span>", attr, html)
			}
			fmt.Fprint(w, html)
		}

		if !(f.preventSurroundingPre || f.inlineCode) {
			fmt.Fprint(w, `</span>`) // End of CodeLine

			fmt.Fprint(w, `</span>`) // End of Line
		}
	}
	fmt.Fprintf(w, "%s", f.preWrapper.End(true))

	if wrapInTable {
		fmt.Fprint(w, "</td></tr></table>\n")
		fmt.Fprint(w, "</div>\n")
	}

	if f.standalone {
		fmt.Fprint(w, "\n</body>\n")
		fmt.Fprint(w, "</html>\n")
	}

	return nil
}
+
+func (f *Formatter) lineIDAttribute(line int) string {
+ if !f.linkableLineNumbers {
+ return ""
+ }
+ return fmt.Sprintf(" id=\"%s\"", f.lineID(line))
+}
+
+func (f *Formatter) lineTitleWithLinkIfNeeded(css map[chroma.TokenType]string, lineDigits, line int) string {
+ title := fmt.Sprintf("%*d", lineDigits, line)
+ if !f.linkableLineNumbers {
+ return title
+ }
+ return fmt.Sprintf("<a%s href=\"#%s\">%s</a>", f.styleAttr(css, chroma.LineLink), f.lineID(line), title)
+}
+
+func (f *Formatter) lineID(line int) string {
+ return fmt.Sprintf("%s%d", f.lineNumbersIDPrefix, line)
+}
+
// shouldHighlight reports whether the (1-based) line falls inside a highlight
// range. The second result reports whether the cursor skipped past ranges that
// end before this line, telling the caller to advance its own highlightIndex.
// Ranges are sorted by start line (see HighlightLines).
func (f *Formatter) shouldHighlight(highlightIndex, line int) (bool, bool) {
	next := false
	// Skip ranges that end before this line.
	for highlightIndex < len(f.highlightRanges) && line > f.highlightRanges[highlightIndex][1] {
		highlightIndex++
		next = true
	}
	if highlightIndex < len(f.highlightRanges) {
		hrange := f.highlightRanges[highlightIndex]
		if line >= hrange[0] && line <= hrange[1] {
			return true, next
		}
	}
	return false, next
}
+
// class returns the prefixed CSS class for a token type, walking up the token
// hierarchy until a StandardTypes entry is found. An entry mapped to the empty
// string deliberately yields no class.
func (f *Formatter) class(t chroma.TokenType) string {
	for t != 0 {
		if cls, ok := chroma.StandardTypes[t]; ok {
			if cls != "" {
				return f.prefix + cls
			}
			return ""
		}
		t = t.Parent()
	}
	// t == 0 here; this covers a StandardTypes entry for the zero token
	// type, if one exists.
	if cls := chroma.StandardTypes[t]; cls != "" {
		return f.prefix + cls
	}
	return ""
}
+
// styleAttr returns either a ` class="…"` or ` style="…"` attribute (with a
// leading space) for the token type, depending on f.Classes. Inline styles
// fall back from the exact type to its sub-category and then category.
// Returns "" when nothing applies.
func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType, extraCSS ...string) string {
	if f.Classes {
		cls := f.class(tt)
		if cls == "" {
			return ""
		}
		return fmt.Sprintf(` class="%s"`, cls)
	}
	if _, ok := styles[tt]; !ok {
		tt = tt.SubCategory()
		if _, ok := styles[tt]; !ok {
			tt = tt.Category()
			if _, ok := styles[tt]; !ok {
				return ""
			}
		}
	}
	css := []string{styles[tt]}
	css = append(css, extraCSS...)
	return fmt.Sprintf(` style="%s"`, strings.Join(css, ";"))
}
+
+func (f *Formatter) tabWidthStyle() string {
+ if f.tabWidth != 0 && f.tabWidth != 8 {
+ return fmt.Sprintf("-moz-tab-size: %[1]d; -o-tab-size: %[1]d; tab-size: %[1]d;", f.tabWidth)
+ }
+ return ""
+}
+
+// WriteCSS writes CSS style definitions (without any surrounding HTML).
+func (f *Formatter) WriteCSS(w io.Writer, style *chroma.Style) error {
+ css := f.styleCache.get(style, false)
+ // Special-case background as it is mapped to the outer ".chroma" class.
+ if _, err := fmt.Fprintf(w, "/* %s */ .%sbg { %s }\n", chroma.Background, f.prefix, css[chroma.Background]); err != nil {
+ return err
+ }
+ // Special-case PreWrapper as it is the ".chroma" class.
+ if _, err := fmt.Fprintf(w, "/* %s */ .%schroma { %s }\n", chroma.PreWrapper, f.prefix, css[chroma.PreWrapper]); err != nil {
+ return err
+ }
+ // Special-case code column of table to expand width.
+ if f.lineNumbers && f.lineNumbersInTable {
+ if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s:last-child { width: 100%%; }",
+ chroma.LineTableTD, f.prefix, f.class(chroma.LineTableTD)); err != nil {
+ return err
+ }
+ }
+ // Special-case line number highlighting when targeted.
+ if f.lineNumbers || f.lineNumbersInTable {
+ targetedLineCSS := StyleEntryToCSS(style.Get(chroma.LineHighlight))
+ for _, tt := range []chroma.TokenType{chroma.LineNumbers, chroma.LineNumbersTable} {
+ fmt.Fprintf(w, "/* %s targeted by URL anchor */ .%schroma .%s:target { %s }\n", tt, f.prefix, f.class(tt), targetedLineCSS)
+ }
+ }
+ tts := []int{}
+ for tt := range css {
+ tts = append(tts, int(tt))
+ }
+ sort.Ints(tts)
+ for _, ti := range tts {
+ tt := chroma.TokenType(ti)
+ switch tt {
+ case chroma.Background, chroma.PreWrapper:
+ continue
+ }
+ class := f.class(tt)
+ if class == "" {
+ continue
+ }
+ styles := css[tt]
+ if _, err := fmt.Fprintf(w, "/* %s */ .%schroma .%s { %s }\n", tt, f.prefix, class, styles); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
// styleToCSS compiles a style into per-token-type CSS declaration strings,
// expressed relative to the background entry, merging in any user-supplied
// custom CSS and appending layout rules for line/table elements.
func (f *Formatter) styleToCSS(style *chroma.Style) map[chroma.TokenType]string {
	classes := map[chroma.TokenType]string{}
	bg := style.Get(chroma.Background)
	// Convert the style.
	for t := range chroma.StandardTypes {
		entry := style.Get(t)
		if t != chroma.Background {
			entry = entry.Sub(bg)
		}

		// Inherit from custom CSS provided by user: category first, then
		// sub-category, then the exact type, so more specific CSS appends last.
		tokenCategory := t.Category()
		tokenSubCategory := t.SubCategory()
		if t != tokenCategory {
			if css, ok := f.customCSS[tokenCategory]; ok {
				classes[t] = css
			}
		}
		if tokenCategory != tokenSubCategory {
			if css, ok := f.customCSS[tokenSubCategory]; ok {
				classes[t] += css
			}
		}
		// Add custom CSS provided by user
		if css, ok := f.customCSS[t]; ok {
			classes[t] += css
		}

		if !f.allClasses && entry.IsZero() && classes[t] == `` {
			continue
		}

		styleEntryCSS := StyleEntryToCSS(entry)
		if styleEntryCSS != `` && classes[t] != `` {
			styleEntryCSS += `;`
		}
		classes[t] = styleEntryCSS + classes[t]
	}
	classes[chroma.Background] += `;` + f.tabWidthStyle()
	classes[chroma.PreWrapper] += classes[chroma.Background]
	// Make PreWrapper a grid to show highlight style with full width.
	if len(f.highlightRanges) > 0 && f.customCSS[chroma.PreWrapper] == `` {
		classes[chroma.PreWrapper] += `display: grid;`
	}
	// Make PreWrapper wrap long lines.
	if f.wrapLongLines {
		classes[chroma.PreWrapper] += `white-space: pre-wrap; word-break: break-word;`
	}
	lineNumbersStyle := `white-space: pre; -webkit-user-select: none; user-select: none; margin-right: 0.4em; padding: 0 0.4em 0 0.4em;`
	// All rules begin with default rules followed by user provided rules
	classes[chroma.Line] = `display: flex;` + classes[chroma.Line]
	classes[chroma.LineNumbers] = lineNumbersStyle + classes[chroma.LineNumbers]
	classes[chroma.LineNumbersTable] = lineNumbersStyle + classes[chroma.LineNumbersTable]
	classes[chroma.LineTable] = "border-spacing: 0; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTable]
	classes[chroma.LineTableTD] = "vertical-align: top; padding: 0; margin: 0; border: 0;" + classes[chroma.LineTableTD]
	classes[chroma.LineLink] = "outline: none; text-decoration: none; color: inherit" + classes[chroma.LineLink]
	return classes
}
+
+// StyleEntryToCSS converts a chroma.StyleEntry to CSS attributes.
+func StyleEntryToCSS(e chroma.StyleEntry) string {
+ styles := []string{}
+ if e.Colour.IsSet() {
+ styles = append(styles, "color: "+e.Colour.String())
+ }
+ if e.Background.IsSet() {
+ styles = append(styles, "background-color: "+e.Background.String())
+ }
+ if e.Bold == chroma.Yes {
+ styles = append(styles, "font-weight: bold")
+ }
+ if e.Italic == chroma.Yes {
+ styles = append(styles, "font-style: italic")
+ }
+ if e.Underline == chroma.Yes {
+ styles = append(styles, "text-decoration: underline")
+ }
+ return strings.Join(styles, "; ")
+}
+
// compressStyle compresses CSS attributes: removes spaces and shortens a
// trailing 6-hex-digit colour of the form #aabbcc to #abc.
func compressStyle(s string) string {
	parts := strings.Split(s, ";")
	out := []string{}
	for _, p := range parts {
		// Collapse internal whitespace runs to single spaces.
		p = strings.Join(strings.Fields(p), " ")
		p = strings.Replace(p, ": ", ":", 1)
		// Guard len(p) >= 6: a short part containing '#' (e.g. "#fff")
		// previously caused an out-of-range slice panic.
		if strings.Contains(p, "#") && len(p) >= 6 {
			c := p[len(p)-6:]
			if c[0] == c[1] && c[2] == c[3] && c[4] == c[5] {
				p = p[:len(p)-6] + c[0:1] + c[2:3] + c[4:5]
			}
		}
		out = append(out, p)
	}
	return strings.Join(out, ";")
}
+
// styleCacheLimit bounds the number of compiled styles retained per formatter.
const styleCacheLimit = 32

// styleCacheEntry is one compiled style, keyed by style pointer and whether
// the CSS was compressed.
type styleCacheEntry struct {
	style *chroma.Style
	compressed bool
	cache map[chroma.TokenType]string
}

type styleCache struct {
	mu sync.Mutex
	// LRU cache of compiled (and possibly compressed) styles. This is a slice
	// because the cache size is small, and a slice is sufficiently fast for
	// small N.
	cache []styleCacheEntry
	f *Formatter
}
+
+func newStyleCache(f *Formatter) *styleCache {
+ return &styleCache{f: f}
+}
+
// get returns the compiled CSS map for a style, consulting a small LRU cache
// keyed by (style pointer, compress flag). Safe for concurrent use.
func (l *styleCache) get(style *chroma.Style, compress bool) map[chroma.TokenType]string {
	l.mu.Lock()
	defer l.mu.Unlock()

	// Look for an existing entry.
	for i := len(l.cache) - 1; i >= 0; i-- {
		entry := l.cache[i]
		if entry.style == style && entry.compressed == compress {
			// Top of the cache, no need to adjust the order.
			if i == len(l.cache)-1 {
				return entry.cache
			}
			// Move this entry to the end of the LRU
			copy(l.cache[i:], l.cache[i+1:])
			l.cache[len(l.cache)-1] = entry
			return entry.cache
		}
	}

	// No entry, create one.
	cached := l.f.styleToCSS(style)
	if !l.f.Classes {
		for t, style := range cached {
			cached[t] = compressStyle(style)
		}
	}
	// NOTE(review): when !Classes and compress are both true the styles are
	// compressed twice; compressStyle appears idempotent for its inputs here,
	// but confirm the double pass is intended.
	if compress {
		for t, style := range cached {
			cached[t] = compressStyle(style)
		}
	}
	// Evict the oldest entry.
	if len(l.cache) >= styleCacheLimit {
		l.cache = l.cache[0:copy(l.cache, l.cache[1:])]
	}
	l.cache = append(l.cache, styleCacheEntry{style: style, cache: cached, compressed: compress})
	return cached
}
@@ -0,0 +1,39 @@
+package formatters
+
+import (
+ "encoding/json"
+ "fmt"
+ "io"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
// JSON formatter outputs the raw token structures as JSON: a top-level array
// with one marshalled token per line.
var JSON = Register("json", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, it chroma.Iterator) error {
	if _, err := fmt.Fprintln(w, "["); err != nil {
		return err
	}
	// i counts emitted tokens so a comma separator precedes every token
	// except the first.
	i := 0
	for t := it(); t != chroma.EOF; t = it() {
		if i > 0 {
			if _, err := fmt.Fprintln(w, ","); err != nil {
				return err
			}
		}
		i++
		bytes, err := json.Marshal(t)
		if err != nil {
			return err
		}
		if _, err := fmt.Fprint(w, " "+string(bytes)); err != nil {
			return err
		}
	}
	if _, err := fmt.Fprintln(w); err != nil {
		return err
	}
	if _, err := fmt.Fprintln(w, "]"); err != nil {
		return err
	}
	return nil
}))
@@ -0,0 +1,51 @@
+// Digitized data copyright (c) 2010 Google Corporation
+// with Reserved Font Arimo, Tinos and Cousine.
+// Copyright (c) 2012 Red Hat, Inc.
+// with Reserved Font Name Liberation.
+//
+// This Font Software is licensed under the SIL Open Font License, Version 1.1.
+// This license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL
+//
+// -----------------------------------------------------------
+// SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
+// -----------------------------------------------------------
+//
+// PREAMBLE
+// The goals of the Open Font License (OFL) are to stimulate worldwide development of collaborative font projects, to support the font creation efforts of academic and linguistic communities, and to provide a free and open framework in which fonts may be shared and improved in partnership with others.
+//
+// The OFL allows the licensed fonts to be used, studied, modified and redistributed freely as long as they are not sold by themselves. The fonts, including any derivative works, can be bundled, embedded, redistributed and/or sold with any software provided that any reserved names are not used by derivative works. The fonts and derivatives, however, cannot be released under any other type of license. The requirement for fonts to remain under this license does not apply to any document created using the fonts or their derivatives.
+//
+// DEFINITIONS
+// "Font Software" refers to the set of files released by the Copyright Holder(s) under this license and clearly marked as such. This may include source files, build scripts and documentation.
+//
+// "Reserved Font Name" refers to any names specified as such after the copyright statement(s).
+//
+// "Original Version" refers to the collection of Font Software components as distributed by the Copyright Holder(s).
+//
+// "Modified Version" refers to any derivative made by adding to, deleting, or substituting -- in part or in whole -- any of the components of the Original Version, by changing formats or by porting the Font Software to a new environment.
+//
+// "Author" refers to any designer, engineer, programmer, technical writer or other person who contributed to the Font Software.
+//
+// PERMISSION & CONDITIONS
+// Permission is hereby granted, free of charge, to any person obtaining a copy of the Font Software, to use, study, copy, merge, embed, modify, redistribute, and sell modified and unmodified copies of the Font Software, subject to the following conditions:
+//
+// 1) Neither the Font Software nor any of its individual components, in Original or Modified Versions, may be sold by itself.
+//
+// 2) Original or Modified Versions of the Font Software may be bundled, redistributed and/or sold with any software, provided that each copy contains the above copyright notice and this license. These can be included either as stand-alone text files, human-readable headers or in the appropriate machine-readable metadata fields within text or binary files as long as those fields can be easily viewed by the user.
+//
+// 3) No Modified Version of the Font Software may use the Reserved Font Name(s) unless explicit written permission is granted by the corresponding Copyright Holder. This restriction only applies to the primary font name as presented to the users.
+//
+// 4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font Software shall not be used to promote, endorse or advertise any Modified Version, except to acknowledge the contribution(s) of the Copyright Holder(s) and the Author(s) or with their explicit written permission.
+//
+// 5) The Font Software, modified or unmodified, in part or in whole, must be distributed entirely under this license, and must not be distributed under any other license. The requirement for fonts to remain under this license does not apply to any document created using the Font Software.
+//
+// TERMINATION
+// This license becomes null and void if any of the above conditions are not met.
+//
+// DISCLAIMER
+// THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM OTHER DEALINGS IN THE FONT SOFTWARE.
+
+package svg
+
+// Liberation Mono as base64 encoded woff (SIL Open Font License)[https://en.wikipedia.org/wiki/Liberation_fonts]
@@ -0,0 +1,222 @@
+// Package svg contains an SVG formatter.
+package svg
+
+import (
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "path"
+ "strings"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
// Option sets an option of the SVG formatter.
// Options are applied in order by New.
type Option func(f *Formatter)
+
+// FontFamily sets the font-family.
+func FontFamily(fontFamily string) Option { return func(f *Formatter) { f.fontFamily = fontFamily } }
+
+// EmbedFontFile embeds given font file
+func EmbedFontFile(fontFamily string, fileName string) (option Option, err error) {
+ var format FontFormat
+ switch path.Ext(fileName) {
+ case ".woff":
+ format = WOFF
+ case ".woff2":
+ format = WOFF2
+ case ".ttf":
+ format = TRUETYPE
+ default:
+ return nil, errors.New("unexpected font file suffix")
+ }
+
+ var content []byte
+ if content, err = os.ReadFile(fileName); err == nil {
+ option = EmbedFont(fontFamily, base64.StdEncoding.EncodeToString(content), format)
+ }
+ return
+}
+
+// EmbedFont embeds given base64 encoded font
+func EmbedFont(fontFamily string, font string, format FontFormat) Option {
+ return func(f *Formatter) { f.fontFamily = fontFamily; f.embeddedFont = font; f.fontFormat = format }
+}
+
+// New SVG formatter.
+func New(options ...Option) *Formatter {
+ f := &Formatter{fontFamily: "Consolas, Monaco, Lucida Console, Liberation Mono, DejaVu Sans Mono, Bitstream Vera Sans Mono, Courier New, monospace"}
+ for _, option := range options {
+ option(f)
+ }
+ return f
+}
+
// Formatter that generates SVG.
type Formatter struct {
	fontFamily   string // CSS font-family applied to the <g> text group
	embeddedFont string // base64-encoded font data; empty means no @font-face
	fontFormat   FontFormat
}
+
+func (f *Formatter) Format(w io.Writer, style *chroma.Style, iterator chroma.Iterator) (err error) {
+ f.writeSVG(w, style, iterator.Tokens())
+ return err
+}
+
// svgEscaper escapes characters that are special in SVG/XML text content.
// Spaces become non-breaking spaces (&#160;) and tabs become four of them so
// leading whitespace survives XML whitespace handling. The previous replacer
// mapped characters to themselves (an entity-decoding corruption), so no
// escaping actually occurred.
var svgEscaper = strings.NewReplacer(
	`&`, "&amp;",
	`<`, "&lt;",
	`>`, "&gt;",
	`"`, "&quot;",
	` `, "&#160;",
	"\t", "&#160;&#160;&#160;&#160;",
)

// escapeString escapes special characters for embedding in SVG markup.
func escapeString(s string) string {
	return svgEscaper.Replace(s)
}
+
// writeSVG renders tokens as a standalone SVG document: prologue, optional
// embedded font, a full-canvas background rect, per-token background rects,
// then one <text> element per source line.
func (f *Formatter) writeSVG(w io.Writer, style *chroma.Style, tokens []chroma.Token) { // nolint: gocyclo
	svgStyles := f.styleToSVG(style)
	lines := chroma.SplitTokensIntoLines(tokens)

	fmt.Fprint(w, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
	fmt.Fprint(w, "<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\" \"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd\">\n")
	// Canvas size estimate: 8px per character wide, 16.8px per line high
	// (for the fixed 14px font below), plus a small margin.
	fmt.Fprintf(w, "<svg width=\"%dpx\" height=\"%dpx\" xmlns=\"http://www.w3.org/2000/svg\">\n", 8*maxLineWidth(lines), 10+int(16.8*float64(len(lines)+1)))

	if f.embeddedFont != "" {
		f.writeFontStyle(w)
	}

	fmt.Fprintf(w, "<rect width=\"100%%\" height=\"100%%\" fill=\"%s\"/>\n", style.Get(chroma.Background).Background.String())
	fmt.Fprintf(w, "<g font-family=\"%s\" font-size=\"14px\" fill=\"%s\">\n", f.fontFamily, style.Get(chroma.Text).Colour.String())

	// Backgrounds must precede the text so the text paints on top.
	f.writeTokenBackgrounds(w, lines, style)

	for index, tokens := range lines {
		fmt.Fprintf(w, "<text x=\"0\" y=\"%fem\" xml:space=\"preserve\">", 1.2*float64(index+1))

		for _, token := range tokens {
			text := escapeString(token.String())
			attr := f.styleAttr(svgStyles, token.Type)
			if attr != "" {
				text = fmt.Sprintf("<tspan %s>%s</tspan>", attr, text)
			}
			fmt.Fprint(w, text)
		}
		fmt.Fprint(w, "</text>")
	}

	fmt.Fprint(w, "\n</g>\n")
	fmt.Fprint(w, "</svg>\n")
}
+
// maxLineWidth returns the widest line length (in bytes after whitespace
// substitution), used to size the SVG canvas width.
func maxLineWidth(lines [][]chroma.Token) int {
	maxWidth := 0
	for _, tokens := range lines {
		length := 0
		for _, token := range tokens {
			// NOTE(review): this whitespace literal looks garbled (it appears to
			// map one whitespace character to another); upstream substitutes tabs
			// to approximate rendered width — confirm against the original source.
			length += len(strings.ReplaceAll(token.String(), ` `, " "))
		}
		if length > maxWidth {
			maxWidth = length
		}
	}
	return maxWidth
}
+
// There is no background attribute for text in SVG so simply calculate the position and text
// of tokens with a background color that differs from the default and add a rectangle for each before
// adding the token.
func (f *Formatter) writeTokenBackgrounds(w io.Writer, lines [][]chroma.Token, style *chroma.Style) {
	for index, tokens := range lines {
		lineLength := 0
		for _, token := range tokens {
			// Token width in characters, mirroring maxLineWidth's substitution.
			length := len(strings.ReplaceAll(token.String(), ` `, " "))
			tokenBackground := style.Get(token.Type).Background
			if tokenBackground.IsSet() && tokenBackground != style.Get(chroma.Background).Background {
				// NOTE(review): the escaped token text is used as the rect "id";
				// ids may repeat or contain entity text — confirm consumers tolerate this.
				fmt.Fprintf(w, "<rect id=\"%s\" x=\"%dch\" y=\"%fem\" width=\"%dch\" height=\"1.2em\" fill=\"%s\" />\n", escapeString(token.String()), lineLength, 1.2*float64(index)+0.25, length, style.Get(token.Type).Background.String())
			}
			lineLength += length
		}
	}
}
+
// FontFormat selects the container format of an embedded font.
type FontFormat int

// https://transfonter.org/formats
const (
	WOFF FontFormat = iota
	WOFF2
	TRUETYPE
)

// fontFormats maps FontFormat values to their CSS format() names,
// indexed by the constants above.
var fontFormats = [...]string{
	"woff",
	"woff2",
	"truetype",
}
+
+func (f *Formatter) writeFontStyle(w io.Writer) {
+ fmt.Fprintf(w, `<style>
+@font-face {
+ font-family: '%s';
+ src: url(data:application/x-font-%s;charset=utf-8;base64,%s) format('%s');'
+ font-weight: normal;
+ font-style: normal;
+}
+</style>`, f.fontFamily, fontFormats[f.fontFormat], f.embeddedFont, fontFormats[f.fontFormat])
+}
+
+func (f *Formatter) styleAttr(styles map[chroma.TokenType]string, tt chroma.TokenType) string {
+ if _, ok := styles[tt]; !ok {
+ tt = tt.SubCategory()
+ if _, ok := styles[tt]; !ok {
+ tt = tt.Category()
+ if _, ok := styles[tt]; !ok {
+ return ""
+ }
+ }
+ }
+ return styles[tt]
+}
+
// styleToSVG compiles the style into per-token-type SVG attribute strings,
// expressed relative to the background entry; zero entries are omitted.
func (f *Formatter) styleToSVG(style *chroma.Style) map[chroma.TokenType]string {
	converted := map[chroma.TokenType]string{}
	bg := style.Get(chroma.Background)
	// Convert the style.
	for t := range chroma.StandardTypes {
		entry := style.Get(t)
		if t != chroma.Background {
			entry = entry.Sub(bg)
		}
		if entry.IsZero() {
			continue
		}
		converted[t] = StyleEntryToSVG(entry)
	}
	return converted
}
+
+// StyleEntryToSVG converts a chroma.StyleEntry to SVG attributes.
+func StyleEntryToSVG(e chroma.StyleEntry) string {
+ var styles []string
+
+ if e.Colour.IsSet() {
+ styles = append(styles, "fill=\""+e.Colour.String()+"\"")
+ }
+ if e.Bold == chroma.Yes {
+ styles = append(styles, "font-weight=\"bold\"")
+ }
+ if e.Italic == chroma.Yes {
+ styles = append(styles, "font-style=\"italic\"")
+ }
+ if e.Underline == chroma.Yes {
+ styles = append(styles, "text-decoration=\"underline\"")
+ }
+ return strings.Join(styles, " ")
+}
@@ -0,0 +1,18 @@
+package formatters
+
+import (
+ "fmt"
+ "io"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
// Tokens formatter outputs the raw token structures, one per line, in Go
// syntax (GoString).
var Tokens = Register("tokens", chroma.FormatterFunc(func(w io.Writer, s *chroma.Style, it chroma.Iterator) error {
	for t := it(); t != chroma.EOF; t = it() {
		if _, err := fmt.Fprintln(w, t.GoString()); err != nil {
			return err
		}
	}
	return nil
}))
@@ -0,0 +1,284 @@
+package formatters
+
+import (
+ "io"
+ "math"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
// ttyTable maps RGB palette colours to the ANSI escape sequences that select
// them as foreground or background colours.
type ttyTable struct {
	foreground map[chroma.Colour]string
	background map[chroma.Colour]string
}

// c is shorthand for the colour literals below; MustParseColour panics on a
// malformed value, which is acceptable for these fixed literals.
var c = chroma.MustParseColour

// ttyTables holds the 8-, 16- and 256-colour terminal palettes, keyed by
// colour count. The map-literal keys are function calls, so repeated colours
// (e.g. black appearing at both index 0 and 16 of the 256-colour palette)
// are legal Go; the later entry wins.
var ttyTables = map[int]*ttyTable{
	8: {
		foreground: map[chroma.Colour]string{
			c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
			c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
			// "Bright" variants are emulated via the bold attribute.
			c("#555555"): "\033[1m\033[30m", c("#ff0000"): "\033[1m\033[31m", c("#00ff00"): "\033[1m\033[32m", c("#ffff00"): "\033[1m\033[33m",
			c("#0000ff"): "\033[1m\033[34m", c("#ff00ff"): "\033[1m\033[35m", c("#00ffff"): "\033[1m\033[36m", c("#ffffff"): "\033[1m\033[37m",
		},
		background: map[chroma.Colour]string{
			c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m",
			c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m",
			c("#555555"): "\033[1m\033[40m", c("#ff0000"): "\033[1m\033[41m", c("#00ff00"): "\033[1m\033[42m", c("#ffff00"): "\033[1m\033[43m",
			c("#0000ff"): "\033[1m\033[44m", c("#ff00ff"): "\033[1m\033[45m", c("#00ffff"): "\033[1m\033[46m", c("#ffffff"): "\033[1m\033[47m",
		},
	},
	16: {
		foreground: map[chroma.Colour]string{
			c("#000000"): "\033[30m", c("#7f0000"): "\033[31m", c("#007f00"): "\033[32m", c("#7f7fe0"): "\033[33m",
			c("#00007f"): "\033[34m", c("#7f007f"): "\033[35m", c("#007f7f"): "\033[36m", c("#e5e5e5"): "\033[37m",
			// Bright colours use the dedicated 90-97 codes.
			c("#555555"): "\033[90m", c("#ff0000"): "\033[91m", c("#00ff00"): "\033[92m", c("#ffff00"): "\033[93m",
			c("#0000ff"): "\033[94m", c("#ff00ff"): "\033[95m", c("#00ffff"): "\033[96m", c("#ffffff"): "\033[97m",
		},
		background: map[chroma.Colour]string{
			c("#000000"): "\033[40m", c("#7f0000"): "\033[41m", c("#007f00"): "\033[42m", c("#7f7fe0"): "\033[43m",
			c("#00007f"): "\033[44m", c("#7f007f"): "\033[45m", c("#007f7f"): "\033[46m", c("#e5e5e5"): "\033[47m",
			c("#555555"): "\033[100m", c("#ff0000"): "\033[101m", c("#00ff00"): "\033[102m", c("#ffff00"): "\033[103m",
			c("#0000ff"): "\033[104m", c("#ff00ff"): "\033[105m", c("#00ffff"): "\033[106m", c("#ffffff"): "\033[107m",
		},
	},
	256: {
		foreground: map[chroma.Colour]string{
			// Indices 0-15: the 16 system colours.
			c("#000000"): "\033[38;5;0m", c("#800000"): "\033[38;5;1m", c("#008000"): "\033[38;5;2m", c("#808000"): "\033[38;5;3m",
			c("#000080"): "\033[38;5;4m", c("#800080"): "\033[38;5;5m", c("#008080"): "\033[38;5;6m", c("#c0c0c0"): "\033[38;5;7m",
			c("#808080"): "\033[38;5;8m", c("#ff0000"): "\033[38;5;9m", c("#00ff00"): "\033[38;5;10m", c("#ffff00"): "\033[38;5;11m",
			c("#0000ff"): "\033[38;5;12m", c("#ff00ff"): "\033[38;5;13m", c("#00ffff"): "\033[38;5;14m", c("#ffffff"): "\033[38;5;15m",
			// Indices 16-231: the 6x6x6 colour cube.
			c("#000000"): "\033[38;5;16m", c("#00005f"): "\033[38;5;17m", c("#000087"): "\033[38;5;18m", c("#0000af"): "\033[38;5;19m",
			c("#0000d7"): "\033[38;5;20m", c("#0000ff"): "\033[38;5;21m", c("#005f00"): "\033[38;5;22m", c("#005f5f"): "\033[38;5;23m",
			c("#005f87"): "\033[38;5;24m", c("#005faf"): "\033[38;5;25m", c("#005fd7"): "\033[38;5;26m", c("#005fff"): "\033[38;5;27m",
			c("#008700"): "\033[38;5;28m", c("#00875f"): "\033[38;5;29m", c("#008787"): "\033[38;5;30m", c("#0087af"): "\033[38;5;31m",
			c("#0087d7"): "\033[38;5;32m", c("#0087ff"): "\033[38;5;33m", c("#00af00"): "\033[38;5;34m", c("#00af5f"): "\033[38;5;35m",
			c("#00af87"): "\033[38;5;36m", c("#00afaf"): "\033[38;5;37m", c("#00afd7"): "\033[38;5;38m", c("#00afff"): "\033[38;5;39m",
			c("#00d700"): "\033[38;5;40m", c("#00d75f"): "\033[38;5;41m", c("#00d787"): "\033[38;5;42m", c("#00d7af"): "\033[38;5;43m",
			c("#00d7d7"): "\033[38;5;44m", c("#00d7ff"): "\033[38;5;45m", c("#00ff00"): "\033[38;5;46m", c("#00ff5f"): "\033[38;5;47m",
			c("#00ff87"): "\033[38;5;48m", c("#00ffaf"): "\033[38;5;49m", c("#00ffd7"): "\033[38;5;50m", c("#00ffff"): "\033[38;5;51m",
			c("#5f0000"): "\033[38;5;52m", c("#5f005f"): "\033[38;5;53m", c("#5f0087"): "\033[38;5;54m", c("#5f00af"): "\033[38;5;55m",
			c("#5f00d7"): "\033[38;5;56m", c("#5f00ff"): "\033[38;5;57m", c("#5f5f00"): "\033[38;5;58m", c("#5f5f5f"): "\033[38;5;59m",
			c("#5f5f87"): "\033[38;5;60m", c("#5f5faf"): "\033[38;5;61m", c("#5f5fd7"): "\033[38;5;62m", c("#5f5fff"): "\033[38;5;63m",
			c("#5f8700"): "\033[38;5;64m", c("#5f875f"): "\033[38;5;65m", c("#5f8787"): "\033[38;5;66m", c("#5f87af"): "\033[38;5;67m",
			c("#5f87d7"): "\033[38;5;68m", c("#5f87ff"): "\033[38;5;69m", c("#5faf00"): "\033[38;5;70m", c("#5faf5f"): "\033[38;5;71m",
			c("#5faf87"): "\033[38;5;72m", c("#5fafaf"): "\033[38;5;73m", c("#5fafd7"): "\033[38;5;74m", c("#5fafff"): "\033[38;5;75m",
			c("#5fd700"): "\033[38;5;76m", c("#5fd75f"): "\033[38;5;77m", c("#5fd787"): "\033[38;5;78m", c("#5fd7af"): "\033[38;5;79m",
			c("#5fd7d7"): "\033[38;5;80m", c("#5fd7ff"): "\033[38;5;81m", c("#5fff00"): "\033[38;5;82m", c("#5fff5f"): "\033[38;5;83m",
			c("#5fff87"): "\033[38;5;84m", c("#5fffaf"): "\033[38;5;85m", c("#5fffd7"): "\033[38;5;86m", c("#5fffff"): "\033[38;5;87m",
			c("#870000"): "\033[38;5;88m", c("#87005f"): "\033[38;5;89m", c("#870087"): "\033[38;5;90m", c("#8700af"): "\033[38;5;91m",
			c("#8700d7"): "\033[38;5;92m", c("#8700ff"): "\033[38;5;93m", c("#875f00"): "\033[38;5;94m", c("#875f5f"): "\033[38;5;95m",
			c("#875f87"): "\033[38;5;96m", c("#875faf"): "\033[38;5;97m", c("#875fd7"): "\033[38;5;98m", c("#875fff"): "\033[38;5;99m",
			c("#878700"): "\033[38;5;100m", c("#87875f"): "\033[38;5;101m", c("#878787"): "\033[38;5;102m", c("#8787af"): "\033[38;5;103m",
			c("#8787d7"): "\033[38;5;104m", c("#8787ff"): "\033[38;5;105m", c("#87af00"): "\033[38;5;106m", c("#87af5f"): "\033[38;5;107m",
			c("#87af87"): "\033[38;5;108m", c("#87afaf"): "\033[38;5;109m", c("#87afd7"): "\033[38;5;110m", c("#87afff"): "\033[38;5;111m",
			c("#87d700"): "\033[38;5;112m", c("#87d75f"): "\033[38;5;113m", c("#87d787"): "\033[38;5;114m", c("#87d7af"): "\033[38;5;115m",
			c("#87d7d7"): "\033[38;5;116m", c("#87d7ff"): "\033[38;5;117m", c("#87ff00"): "\033[38;5;118m", c("#87ff5f"): "\033[38;5;119m",
			c("#87ff87"): "\033[38;5;120m", c("#87ffaf"): "\033[38;5;121m", c("#87ffd7"): "\033[38;5;122m", c("#87ffff"): "\033[38;5;123m",
			c("#af0000"): "\033[38;5;124m", c("#af005f"): "\033[38;5;125m", c("#af0087"): "\033[38;5;126m", c("#af00af"): "\033[38;5;127m",
			c("#af00d7"): "\033[38;5;128m", c("#af00ff"): "\033[38;5;129m", c("#af5f00"): "\033[38;5;130m", c("#af5f5f"): "\033[38;5;131m",
			c("#af5f87"): "\033[38;5;132m", c("#af5faf"): "\033[38;5;133m", c("#af5fd7"): "\033[38;5;134m", c("#af5fff"): "\033[38;5;135m",
			c("#af8700"): "\033[38;5;136m", c("#af875f"): "\033[38;5;137m", c("#af8787"): "\033[38;5;138m", c("#af87af"): "\033[38;5;139m",
			c("#af87d7"): "\033[38;5;140m", c("#af87ff"): "\033[38;5;141m", c("#afaf00"): "\033[38;5;142m", c("#afaf5f"): "\033[38;5;143m",
			c("#afaf87"): "\033[38;5;144m", c("#afafaf"): "\033[38;5;145m", c("#afafd7"): "\033[38;5;146m", c("#afafff"): "\033[38;5;147m",
			c("#afd700"): "\033[38;5;148m", c("#afd75f"): "\033[38;5;149m", c("#afd787"): "\033[38;5;150m", c("#afd7af"): "\033[38;5;151m",
			c("#afd7d7"): "\033[38;5;152m", c("#afd7ff"): "\033[38;5;153m", c("#afff00"): "\033[38;5;154m", c("#afff5f"): "\033[38;5;155m",
			c("#afff87"): "\033[38;5;156m", c("#afffaf"): "\033[38;5;157m", c("#afffd7"): "\033[38;5;158m", c("#afffff"): "\033[38;5;159m",
			c("#d70000"): "\033[38;5;160m", c("#d7005f"): "\033[38;5;161m", c("#d70087"): "\033[38;5;162m", c("#d700af"): "\033[38;5;163m",
			c("#d700d7"): "\033[38;5;164m", c("#d700ff"): "\033[38;5;165m", c("#d75f00"): "\033[38;5;166m", c("#d75f5f"): "\033[38;5;167m",
			c("#d75f87"): "\033[38;5;168m", c("#d75faf"): "\033[38;5;169m", c("#d75fd7"): "\033[38;5;170m", c("#d75fff"): "\033[38;5;171m",
			c("#d78700"): "\033[38;5;172m", c("#d7875f"): "\033[38;5;173m", c("#d78787"): "\033[38;5;174m", c("#d787af"): "\033[38;5;175m",
			c("#d787d7"): "\033[38;5;176m", c("#d787ff"): "\033[38;5;177m", c("#d7af00"): "\033[38;5;178m", c("#d7af5f"): "\033[38;5;179m",
			c("#d7af87"): "\033[38;5;180m", c("#d7afaf"): "\033[38;5;181m", c("#d7afd7"): "\033[38;5;182m", c("#d7afff"): "\033[38;5;183m",
			c("#d7d700"): "\033[38;5;184m", c("#d7d75f"): "\033[38;5;185m", c("#d7d787"): "\033[38;5;186m", c("#d7d7af"): "\033[38;5;187m",
			c("#d7d7d7"): "\033[38;5;188m", c("#d7d7ff"): "\033[38;5;189m", c("#d7ff00"): "\033[38;5;190m", c("#d7ff5f"): "\033[38;5;191m",
			c("#d7ff87"): "\033[38;5;192m", c("#d7ffaf"): "\033[38;5;193m", c("#d7ffd7"): "\033[38;5;194m", c("#d7ffff"): "\033[38;5;195m",
			c("#ff0000"): "\033[38;5;196m", c("#ff005f"): "\033[38;5;197m", c("#ff0087"): "\033[38;5;198m", c("#ff00af"): "\033[38;5;199m",
			c("#ff00d7"): "\033[38;5;200m", c("#ff00ff"): "\033[38;5;201m", c("#ff5f00"): "\033[38;5;202m", c("#ff5f5f"): "\033[38;5;203m",
			c("#ff5f87"): "\033[38;5;204m", c("#ff5faf"): "\033[38;5;205m", c("#ff5fd7"): "\033[38;5;206m", c("#ff5fff"): "\033[38;5;207m",
			c("#ff8700"): "\033[38;5;208m", c("#ff875f"): "\033[38;5;209m", c("#ff8787"): "\033[38;5;210m", c("#ff87af"): "\033[38;5;211m",
			c("#ff87d7"): "\033[38;5;212m", c("#ff87ff"): "\033[38;5;213m", c("#ffaf00"): "\033[38;5;214m", c("#ffaf5f"): "\033[38;5;215m",
			c("#ffaf87"): "\033[38;5;216m", c("#ffafaf"): "\033[38;5;217m", c("#ffafd7"): "\033[38;5;218m", c("#ffafff"): "\033[38;5;219m",
			c("#ffd700"): "\033[38;5;220m", c("#ffd75f"): "\033[38;5;221m", c("#ffd787"): "\033[38;5;222m", c("#ffd7af"): "\033[38;5;223m",
			c("#ffd7d7"): "\033[38;5;224m", c("#ffd7ff"): "\033[38;5;225m", c("#ffff00"): "\033[38;5;226m", c("#ffff5f"): "\033[38;5;227m",
			c("#ffff87"): "\033[38;5;228m", c("#ffffaf"): "\033[38;5;229m", c("#ffffd7"): "\033[38;5;230m", c("#ffffff"): "\033[38;5;231m",
			// Indices 232-255: the greyscale ramp.
			c("#080808"): "\033[38;5;232m", c("#121212"): "\033[38;5;233m", c("#1c1c1c"): "\033[38;5;234m", c("#262626"): "\033[38;5;235m",
			c("#303030"): "\033[38;5;236m", c("#3a3a3a"): "\033[38;5;237m", c("#444444"): "\033[38;5;238m", c("#4e4e4e"): "\033[38;5;239m",
			c("#585858"): "\033[38;5;240m", c("#626262"): "\033[38;5;241m", c("#6c6c6c"): "\033[38;5;242m", c("#767676"): "\033[38;5;243m",
			c("#808080"): "\033[38;5;244m", c("#8a8a8a"): "\033[38;5;245m", c("#949494"): "\033[38;5;246m", c("#9e9e9e"): "\033[38;5;247m",
			c("#a8a8a8"): "\033[38;5;248m", c("#b2b2b2"): "\033[38;5;249m", c("#bcbcbc"): "\033[38;5;250m", c("#c6c6c6"): "\033[38;5;251m",
			c("#d0d0d0"): "\033[38;5;252m", c("#dadada"): "\033[38;5;253m", c("#e4e4e4"): "\033[38;5;254m", c("#eeeeee"): "\033[38;5;255m",
		},
		background: map[chroma.Colour]string{
			// Same palette as foreground, with 48;5 (background) selectors.
			c("#000000"): "\033[48;5;0m", c("#800000"): "\033[48;5;1m", c("#008000"): "\033[48;5;2m", c("#808000"): "\033[48;5;3m",
			c("#000080"): "\033[48;5;4m", c("#800080"): "\033[48;5;5m", c("#008080"): "\033[48;5;6m", c("#c0c0c0"): "\033[48;5;7m",
			c("#808080"): "\033[48;5;8m", c("#ff0000"): "\033[48;5;9m", c("#00ff00"): "\033[48;5;10m", c("#ffff00"): "\033[48;5;11m",
			c("#0000ff"): "\033[48;5;12m", c("#ff00ff"): "\033[48;5;13m", c("#00ffff"): "\033[48;5;14m", c("#ffffff"): "\033[48;5;15m",
			c("#000000"): "\033[48;5;16m", c("#00005f"): "\033[48;5;17m", c("#000087"): "\033[48;5;18m", c("#0000af"): "\033[48;5;19m",
			c("#0000d7"): "\033[48;5;20m", c("#0000ff"): "\033[48;5;21m", c("#005f00"): "\033[48;5;22m", c("#005f5f"): "\033[48;5;23m",
			c("#005f87"): "\033[48;5;24m", c("#005faf"): "\033[48;5;25m", c("#005fd7"): "\033[48;5;26m", c("#005fff"): "\033[48;5;27m",
			c("#008700"): "\033[48;5;28m", c("#00875f"): "\033[48;5;29m", c("#008787"): "\033[48;5;30m", c("#0087af"): "\033[48;5;31m",
			c("#0087d7"): "\033[48;5;32m", c("#0087ff"): "\033[48;5;33m", c("#00af00"): "\033[48;5;34m", c("#00af5f"): "\033[48;5;35m",
			c("#00af87"): "\033[48;5;36m", c("#00afaf"): "\033[48;5;37m", c("#00afd7"): "\033[48;5;38m", c("#00afff"): "\033[48;5;39m",
			c("#00d700"): "\033[48;5;40m", c("#00d75f"): "\033[48;5;41m", c("#00d787"): "\033[48;5;42m", c("#00d7af"): "\033[48;5;43m",
			c("#00d7d7"): "\033[48;5;44m", c("#00d7ff"): "\033[48;5;45m", c("#00ff00"): "\033[48;5;46m", c("#00ff5f"): "\033[48;5;47m",
			c("#00ff87"): "\033[48;5;48m", c("#00ffaf"): "\033[48;5;49m", c("#00ffd7"): "\033[48;5;50m", c("#00ffff"): "\033[48;5;51m",
			c("#5f0000"): "\033[48;5;52m", c("#5f005f"): "\033[48;5;53m", c("#5f0087"): "\033[48;5;54m", c("#5f00af"): "\033[48;5;55m",
			c("#5f00d7"): "\033[48;5;56m", c("#5f00ff"): "\033[48;5;57m", c("#5f5f00"): "\033[48;5;58m", c("#5f5f5f"): "\033[48;5;59m",
			c("#5f5f87"): "\033[48;5;60m", c("#5f5faf"): "\033[48;5;61m", c("#5f5fd7"): "\033[48;5;62m", c("#5f5fff"): "\033[48;5;63m",
			c("#5f8700"): "\033[48;5;64m", c("#5f875f"): "\033[48;5;65m", c("#5f8787"): "\033[48;5;66m", c("#5f87af"): "\033[48;5;67m",
			c("#5f87d7"): "\033[48;5;68m", c("#5f87ff"): "\033[48;5;69m", c("#5faf00"): "\033[48;5;70m", c("#5faf5f"): "\033[48;5;71m",
			c("#5faf87"): "\033[48;5;72m", c("#5fafaf"): "\033[48;5;73m", c("#5fafd7"): "\033[48;5;74m", c("#5fafff"): "\033[48;5;75m",
			c("#5fd700"): "\033[48;5;76m", c("#5fd75f"): "\033[48;5;77m", c("#5fd787"): "\033[48;5;78m", c("#5fd7af"): "\033[48;5;79m",
			c("#5fd7d7"): "\033[48;5;80m", c("#5fd7ff"): "\033[48;5;81m", c("#5fff00"): "\033[48;5;82m", c("#5fff5f"): "\033[48;5;83m",
			c("#5fff87"): "\033[48;5;84m", c("#5fffaf"): "\033[48;5;85m", c("#5fffd7"): "\033[48;5;86m", c("#5fffff"): "\033[48;5;87m",
			c("#870000"): "\033[48;5;88m", c("#87005f"): "\033[48;5;89m", c("#870087"): "\033[48;5;90m", c("#8700af"): "\033[48;5;91m",
			c("#8700d7"): "\033[48;5;92m", c("#8700ff"): "\033[48;5;93m", c("#875f00"): "\033[48;5;94m", c("#875f5f"): "\033[48;5;95m",
			c("#875f87"): "\033[48;5;96m", c("#875faf"): "\033[48;5;97m", c("#875fd7"): "\033[48;5;98m", c("#875fff"): "\033[48;5;99m",
			c("#878700"): "\033[48;5;100m", c("#87875f"): "\033[48;5;101m", c("#878787"): "\033[48;5;102m", c("#8787af"): "\033[48;5;103m",
			c("#8787d7"): "\033[48;5;104m", c("#8787ff"): "\033[48;5;105m", c("#87af00"): "\033[48;5;106m", c("#87af5f"): "\033[48;5;107m",
			c("#87af87"): "\033[48;5;108m", c("#87afaf"): "\033[48;5;109m", c("#87afd7"): "\033[48;5;110m", c("#87afff"): "\033[48;5;111m",
			c("#87d700"): "\033[48;5;112m", c("#87d75f"): "\033[48;5;113m", c("#87d787"): "\033[48;5;114m", c("#87d7af"): "\033[48;5;115m",
			c("#87d7d7"): "\033[48;5;116m", c("#87d7ff"): "\033[48;5;117m", c("#87ff00"): "\033[48;5;118m", c("#87ff5f"): "\033[48;5;119m",
			c("#87ff87"): "\033[48;5;120m", c("#87ffaf"): "\033[48;5;121m", c("#87ffd7"): "\033[48;5;122m", c("#87ffff"): "\033[48;5;123m",
			c("#af0000"): "\033[48;5;124m", c("#af005f"): "\033[48;5;125m", c("#af0087"): "\033[48;5;126m", c("#af00af"): "\033[48;5;127m",
			c("#af00d7"): "\033[48;5;128m", c("#af00ff"): "\033[48;5;129m", c("#af5f00"): "\033[48;5;130m", c("#af5f5f"): "\033[48;5;131m",
			c("#af5f87"): "\033[48;5;132m", c("#af5faf"): "\033[48;5;133m", c("#af5fd7"): "\033[48;5;134m", c("#af5fff"): "\033[48;5;135m",
			c("#af8700"): "\033[48;5;136m", c("#af875f"): "\033[48;5;137m", c("#af8787"): "\033[48;5;138m", c("#af87af"): "\033[48;5;139m",
			c("#af87d7"): "\033[48;5;140m", c("#af87ff"): "\033[48;5;141m", c("#afaf00"): "\033[48;5;142m", c("#afaf5f"): "\033[48;5;143m",
			c("#afaf87"): "\033[48;5;144m", c("#afafaf"): "\033[48;5;145m", c("#afafd7"): "\033[48;5;146m", c("#afafff"): "\033[48;5;147m",
			c("#afd700"): "\033[48;5;148m", c("#afd75f"): "\033[48;5;149m", c("#afd787"): "\033[48;5;150m", c("#afd7af"): "\033[48;5;151m",
			c("#afd7d7"): "\033[48;5;152m", c("#afd7ff"): "\033[48;5;153m", c("#afff00"): "\033[48;5;154m", c("#afff5f"): "\033[48;5;155m",
			c("#afff87"): "\033[48;5;156m", c("#afffaf"): "\033[48;5;157m", c("#afffd7"): "\033[48;5;158m", c("#afffff"): "\033[48;5;159m",
			c("#d70000"): "\033[48;5;160m", c("#d7005f"): "\033[48;5;161m", c("#d70087"): "\033[48;5;162m", c("#d700af"): "\033[48;5;163m",
			c("#d700d7"): "\033[48;5;164m", c("#d700ff"): "\033[48;5;165m", c("#d75f00"): "\033[48;5;166m", c("#d75f5f"): "\033[48;5;167m",
			c("#d75f87"): "\033[48;5;168m", c("#d75faf"): "\033[48;5;169m", c("#d75fd7"): "\033[48;5;170m", c("#d75fff"): "\033[48;5;171m",
			c("#d78700"): "\033[48;5;172m", c("#d7875f"): "\033[48;5;173m", c("#d78787"): "\033[48;5;174m", c("#d787af"): "\033[48;5;175m",
			c("#d787d7"): "\033[48;5;176m", c("#d787ff"): "\033[48;5;177m", c("#d7af00"): "\033[48;5;178m", c("#d7af5f"): "\033[48;5;179m",
			c("#d7af87"): "\033[48;5;180m", c("#d7afaf"): "\033[48;5;181m", c("#d7afd7"): "\033[48;5;182m", c("#d7afff"): "\033[48;5;183m",
			c("#d7d700"): "\033[48;5;184m", c("#d7d75f"): "\033[48;5;185m", c("#d7d787"): "\033[48;5;186m", c("#d7d7af"): "\033[48;5;187m",
			c("#d7d7d7"): "\033[48;5;188m", c("#d7d7ff"): "\033[48;5;189m", c("#d7ff00"): "\033[48;5;190m", c("#d7ff5f"): "\033[48;5;191m",
			c("#d7ff87"): "\033[48;5;192m", c("#d7ffaf"): "\033[48;5;193m", c("#d7ffd7"): "\033[48;5;194m", c("#d7ffff"): "\033[48;5;195m",
			c("#ff0000"): "\033[48;5;196m", c("#ff005f"): "\033[48;5;197m", c("#ff0087"): "\033[48;5;198m", c("#ff00af"): "\033[48;5;199m",
			c("#ff00d7"): "\033[48;5;200m", c("#ff00ff"): "\033[48;5;201m", c("#ff5f00"): "\033[48;5;202m", c("#ff5f5f"): "\033[48;5;203m",
			c("#ff5f87"): "\033[48;5;204m", c("#ff5faf"): "\033[48;5;205m", c("#ff5fd7"): "\033[48;5;206m", c("#ff5fff"): "\033[48;5;207m",
			c("#ff8700"): "\033[48;5;208m", c("#ff875f"): "\033[48;5;209m", c("#ff8787"): "\033[48;5;210m", c("#ff87af"): "\033[48;5;211m",
			c("#ff87d7"): "\033[48;5;212m", c("#ff87ff"): "\033[48;5;213m", c("#ffaf00"): "\033[48;5;214m", c("#ffaf5f"): "\033[48;5;215m",
			c("#ffaf87"): "\033[48;5;216m", c("#ffafaf"): "\033[48;5;217m", c("#ffafd7"): "\033[48;5;218m", c("#ffafff"): "\033[48;5;219m",
			c("#ffd700"): "\033[48;5;220m", c("#ffd75f"): "\033[48;5;221m", c("#ffd787"): "\033[48;5;222m", c("#ffd7af"): "\033[48;5;223m",
			c("#ffd7d7"): "\033[48;5;224m", c("#ffd7ff"): "\033[48;5;225m", c("#ffff00"): "\033[48;5;226m", c("#ffff5f"): "\033[48;5;227m",
			c("#ffff87"): "\033[48;5;228m", c("#ffffaf"): "\033[48;5;229m", c("#ffffd7"): "\033[48;5;230m", c("#ffffff"): "\033[48;5;231m",
			c("#080808"): "\033[48;5;232m", c("#121212"): "\033[48;5;233m", c("#1c1c1c"): "\033[48;5;234m", c("#262626"): "\033[48;5;235m",
			c("#303030"): "\033[48;5;236m", c("#3a3a3a"): "\033[48;5;237m", c("#444444"): "\033[48;5;238m", c("#4e4e4e"): "\033[48;5;239m",
			c("#585858"): "\033[48;5;240m", c("#626262"): "\033[48;5;241m", c("#6c6c6c"): "\033[48;5;242m", c("#767676"): "\033[48;5;243m",
			c("#808080"): "\033[48;5;244m", c("#8a8a8a"): "\033[48;5;245m", c("#949494"): "\033[48;5;246m", c("#9e9e9e"): "\033[48;5;247m",
			c("#a8a8a8"): "\033[48;5;248m", c("#b2b2b2"): "\033[48;5;249m", c("#bcbcbc"): "\033[48;5;250m", c("#c6c6c6"): "\033[48;5;251m",
			c("#d0d0d0"): "\033[48;5;252m", c("#dadada"): "\033[48;5;253m", c("#e4e4e4"): "\033[48;5;254m", c("#eeeeee"): "\033[48;5;255m",
		},
	},
}
+
+func entryToEscapeSequence(table *ttyTable, entry chroma.StyleEntry) string {
+ out := ""
+ if entry.Bold == chroma.Yes {
+ out += "\033[1m"
+ }
+ if entry.Underline == chroma.Yes {
+ out += "\033[4m"
+ }
+ if entry.Italic == chroma.Yes {
+ out += "\033[3m"
+ }
+ if entry.Colour.IsSet() {
+ out += table.foreground[findClosest(table, entry.Colour)]
+ }
+ if entry.Background.IsSet() {
+ out += table.background[findClosest(table, entry.Background)]
+ }
+ return out
+}
+
+func findClosest(table *ttyTable, seeking chroma.Colour) chroma.Colour {
+ closestColour := chroma.Colour(0)
+ closest := float64(math.MaxFloat64)
+ for colour := range table.foreground {
+ distance := colour.Distance(seeking)
+ if distance < closest {
+ closest = distance
+ closestColour = colour
+ }
+ }
+ return closestColour
+}
+
+func styleToEscapeSequence(table *ttyTable, style *chroma.Style) map[chroma.TokenType]string {
+ style = clearBackground(style)
+ out := map[chroma.TokenType]string{}
+ for _, ttype := range style.Types() {
+ entry := style.Get(ttype)
+ out[ttype] = entryToEscapeSequence(table, entry)
+ }
+ return out
+}
+
+// Clear the background colour.
+func clearBackground(style *chroma.Style) *chroma.Style {
+ builder := style.Builder()
+ bg := builder.Get(chroma.Background)
+ bg.Background = 0
+ bg.NoInherit = true
+ builder.AddEntry(chroma.Background, bg)
+ style, _ = builder.Build()
+ return style
+}
+
// indexedTTYFormatter formats tokens using a fixed indexed-colour palette
// (8, 16 or 256 colours) described by its table.
type indexedTTYFormatter struct {
	table *ttyTable // palette of foreground/background escape sequences
}
+
+func (c *indexedTTYFormatter) Format(w io.Writer, style *chroma.Style, it chroma.Iterator) (err error) {
+ theme := styleToEscapeSequence(c.table, style)
+ for token := it(); token != chroma.EOF; token = it() {
+ clr, ok := theme[token.Type]
+
+ // This search mimics how styles.Get() is used in tty_truecolour.go.
+ if !ok {
+ clr, ok = theme[token.Type.SubCategory()]
+ if !ok {
+ clr, ok = theme[token.Type.Category()]
+ if !ok {
+ clr, ok = theme[chroma.Text]
+ if !ok {
+ clr = theme[chroma.Background]
+ }
+ }
+ }
+ }
+
+ writeToken(w, clr, token.Value)
+ }
+ return nil
+}
+
// TTY is an 8-colour terminal formatter. It is equivalent to TTY8.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY = Register("terminal", &indexedTTYFormatter{ttyTables[8]})

// TTY8 is an 8-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY8 = Register("terminal8", &indexedTTYFormatter{ttyTables[8]})

// TTY16 is a 16-colour terminal formatter.
//
// It uses \033[3xm for normal colours and \033[90Xm for bright colours.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY16 = Register("terminal16", &indexedTTYFormatter{ttyTables[16]})

// TTY256 is a 256-colour terminal formatter.
//
// The Lab colour space is used to map RGB values to the most appropriate index colour.
var TTY256 = Register("terminal256", &indexedTTYFormatter{ttyTables[256]})
@@ -0,0 +1,76 @@
+package formatters
+
+import (
+ "fmt"
+ "io"
+ "regexp"
+
+ "github.com/alecthomas/chroma/v2"
+)
+
// TTY16m is a true-colour (24-bit) terminal formatter. It emits 38;2 / 48;2
// RGB escape sequences rather than indexed palette colours.
var TTY16m = Register("terminal16m", chroma.FormatterFunc(trueColourFormatter))
+
// crOrCrLf matches a line ending: LF, optionally preceded by CR.
var crOrCrLf = regexp.MustCompile(`\r?\n`)

// writeToken prints text with the given formatting, resetting the formatting
// before every line ending and restoring it at the start of the next line.
//
// This way, a pager (like https://github.com/walles/moar for example) can show
// any line in the output by itself, and it will get the right formatting.
func writeToken(w io.Writer, formatting string, text string) {
	// No formatting requested: pass the text straight through.
	if formatting == "" {
		fmt.Fprint(w, text)
		return
	}

	start := 0
	for _, loc := range crOrCrLf.FindAllStringIndex(text, -1) {
		nlStart, nlEnd := loc[0], loc[1]
		// Formatted segment, then reset, then the raw line ending.
		// (Fprint inserts no separators between string operands.)
		fmt.Fprint(w, formatting, text[start:nlStart], "\033[0m", text[nlStart:nlEnd])
		start = nlEnd
	}

	// Tail after the final newline (or the whole text if there was none).
	if start < len(text) {
		fmt.Fprint(w, formatting, text[start:], "\033[0m")
	}
}
+
+func trueColourFormatter(w io.Writer, style *chroma.Style, it chroma.Iterator) error {
+ style = clearBackground(style)
+ for token := it(); token != chroma.EOF; token = it() {
+ entry := style.Get(token.Type)
+ if entry.IsZero() {
+ fmt.Fprint(w, token.Value)
+ continue
+ }
+
+ formatting := ""
+ if entry.Bold == chroma.Yes {
+ formatting += "\033[1m"
+ }
+ if entry.Underline == chroma.Yes {
+ formatting += "\033[4m"
+ }
+ if entry.Italic == chroma.Yes {
+ formatting += "\033[3m"
+ }
+ if entry.Colour.IsSet() {
+ formatting += fmt.Sprintf("\033[38;2;%d;%d;%dm", entry.Colour.Red(), entry.Colour.Green(), entry.Colour.Blue())
+ }
+ if entry.Background.IsSet() {
+ formatting += fmt.Sprintf("\033[48;2;%d;%d;%dm", entry.Background.Red(), entry.Background.Green(), entry.Background.Blue())
+ }
+
+ writeToken(w, formatting, token.Value)
+ }
+ return nil
+}
@@ -0,0 +1,76 @@
+package chroma
+
+import "strings"
+
+// An Iterator across tokens.
+//
+// EOF will be returned at the end of the Token stream.
+//
+// If an error occurs within an Iterator, it may propagate this in a panic. Formatters should recover.
+type Iterator func() Token
+
+// Tokens consumes all tokens from the iterator and returns them as a slice.
+func (i Iterator) Tokens() []Token {
+ var out []Token
+ for t := i(); t != EOF; t = i() {
+ out = append(out, t)
+ }
+ return out
+}
+
+// Concaterator concatenates tokens from a series of iterators.
+func Concaterator(iterators ...Iterator) Iterator {
+ return func() Token {
+ for len(iterators) > 0 {
+ t := iterators[0]()
+ if t != EOF {
+ return t
+ }
+ iterators = iterators[1:]
+ }
+ return EOF
+ }
+}
+
+// Literator converts a sequence of literal Tokens into an Iterator.
+func Literator(tokens ...Token) Iterator {
+ return func() Token {
+ if len(tokens) == 0 {
+ return EOF
+ }
+ token := tokens[0]
+ tokens = tokens[1:]
+ return token
+ }
+}
+
// SplitTokensIntoLines splits tokens containing newlines in two, returning
// one token slice per line. A token that spans a newline is cloned so the
// head (up to and including the "\n") ends one line and the tail starts the
// next. A final empty-token-only line (produced when the input ends with a
// newline) is stripped.
func SplitTokensIntoLines(tokens []Token) (out [][]Token) {
	var line []Token // nolint: prealloc
	for _, token := range tokens {
		for strings.Contains(token.Value, "\n") {
			parts := strings.SplitAfterN(token.Value, "\n", 2)
			// Token becomes the tail.
			token.Value = parts[1]

			// Append the head to the line and flush the line.
			clone := token.Clone()
			clone.Value = parts[0]
			line = append(line, clone)
			out = append(out, line)
			line = nil
		}
		// token now holds the remainder with no newline; it joins the
		// current (possibly fresh) line.
		line = append(line, token)
	}
	if len(line) > 0 {
		out = append(out, line)
	}
	// Strip empty trailing token line.
	if len(out) > 0 {
		last := out[len(out)-1]
		if len(last) == 1 && last[0].Value == "" {
			out = out[:len(out)-1]
		}
	}
	return
}
@@ -0,0 +1,162 @@
+package chroma
+
+import (
+ "fmt"
+ "strings"
+)
+
var (
	// defaultOptions are used when Tokenise is given nil options: start in
	// the "root" state and normalise line endings to LF.
	defaultOptions = &TokeniseOptions{
		State:    "root",
		EnsureLF: true,
	}
)
+
// Config for a lexer. The XML tags correspond to the fields of the
// XML-defined lexer files.
type Config struct {
	// Name of the lexer.
	Name string `xml:"name,omitempty"`

	// Shortcuts for the lexer.
	Aliases []string `xml:"alias,omitempty"`

	// File name globs.
	Filenames []string `xml:"filename,omitempty"`

	// Secondary file name globs.
	AliasFilenames []string `xml:"alias_filename,omitempty"`

	// MIME types.
	MimeTypes []string `xml:"mime_type,omitempty"`

	// Regex matching is case-insensitive.
	CaseInsensitive bool `xml:"case_insensitive,omitempty"`

	// Regex matches all characters.
	DotAll bool `xml:"dot_all,omitempty"`

	// Regex does not match across lines ($ matches EOL).
	//
	// Defaults to multiline.
	NotMultiline bool `xml:"not_multiline,omitempty"`

	// Don't strip leading and trailing newlines from the input.
	// DontStripNL bool

	// Strip all leading and trailing whitespace from the input.
	// StripAll bool

	// Make sure that the input ends with a newline. This
	// is required for some lexers that consume input linewise.
	EnsureNL bool `xml:"ensure_nl,omitempty"`

	// If given and greater than 0, expand tabs in the input.
	// TabSize int

	// Priority of lexer.
	//
	// If this is 0 it will be treated as a default of 1.
	Priority float32 `xml:"priority,omitempty"`

	// Analyse is a list of regexes to match against the input.
	//
	// If a match is found, the score is returned if single attribute is set to true,
	// otherwise the sum of all the score of matching patterns will be
	// used as the final score.
	Analyse *AnalyseConfig `xml:"analyse,omitempty"`
}
+
// AnalyseConfig defines the list of regex analysers used to score how likely
// an input text is to match a lexer.
type AnalyseConfig struct {
	// Regexes to run against the input.
	Regexes []RegexConfig `xml:"regex,omitempty"`
	// If true, the first matching score is returned.
	First bool `xml:"first,attr"`
}

// RegexConfig defines a single regex pattern and its score in case of match.
type RegexConfig struct {
	Pattern string  `xml:"pattern,attr"`
	Score   float32 `xml:"score,attr"`
}
+
// Token output to formatter. A Token pairs a lexical category (Type) with the
// literal text it covers (Value).
type Token struct {
	Type  TokenType `json:"type"`
	Value string    `json:"value"`
}

// String returns the token's literal text.
func (t *Token) String() string { return t.Value }

// GoString returns a Go-syntax representation of the token, used for debugging.
func (t *Token) GoString() string { return fmt.Sprintf("&Token{%s, %q}", t.Type, t.Value) }

// Clone returns a clone of the Token.
func (t *Token) Clone() Token {
	return *t
}

// EOF is returned by lexers at the end of input. It is the zero Token.
var EOF Token
+
// TokeniseOptions contains options for tokenisers.
type TokeniseOptions struct {
	// State to start tokenisation in. Defaults to "root".
	State string
	// Nested tokenisation.
	Nested bool

	// If true, all EOLs are converted into LF
	// by replacing CRLF and CR.
	EnsureLF bool
}
+
// A Lexer for tokenising source code.
type Lexer interface {
	// Config describing the features of the Lexer.
	Config() *Config
	// Tokenise returns an Iterator over tokens in text.
	Tokenise(options *TokeniseOptions, text string) (Iterator, error)
	// SetRegistry sets the registry this Lexer is associated with.
	//
	// The registry should be used by the Lexer if it needs to look up other
	// lexers.
	SetRegistry(registry *LexerRegistry) Lexer
	// SetAnalyser sets a function the Lexer should use for scoring how
	// likely a fragment of text is to match this lexer, between 0.0 and 1.0.
	// A value of 1 indicates high confidence.
	//
	// Lexers may ignore this if they implement their own analysers.
	SetAnalyser(analyser func(text string) float32) Lexer
	// AnalyseText scores how likely a fragment of text is to match
	// this lexer, between 0.0 and 1.0. A value of 1 indicates high confidence.
	AnalyseText(text string) float32
}
+
// Lexers is a slice of lexers sortable by name (case-insensitively).
// It implements sort.Interface.
type Lexers []Lexer

func (l Lexers) Len() int      { return len(l) }
func (l Lexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
func (l Lexers) Less(i, j int) bool {
	return strings.ToLower(l[i].Config().Name) < strings.ToLower(l[j].Config().Name)
}
+
+// PrioritisedLexers is a slice of lexers sortable by priority.
+type PrioritisedLexers []Lexer
+
+func (l PrioritisedLexers) Len() int { return len(l) }
+func (l PrioritisedLexers) Swap(i, j int) { l[i], l[j] = l[j], l[i] }
+func (l PrioritisedLexers) Less(i, j int) bool {
+ ip := l[i].Config().Priority
+ if ip == 0 {
+ ip = 1
+ }
+ jp := l[j].Config().Priority
+ if jp == 0 {
+ jp = 1
+ }
+ return ip > jp
+}
+
// Analyser determines how appropriate this lexer is for the given text,
// returning a confidence score between 0.0 and 1.0.
type Analyser interface {
	AnalyseText(text string) float32
}
@@ -0,0 +1,46 @@
+# Chroma lexers
+
+All lexers in Chroma should now be defined in XML unless they require custom code.
+
+## Lexer tests
+
+The tests in this directory feed a known input `testdata/<name>.actual` into the parser for `<name>` and check
+that its output matches `<name>.expected`.
+
It is also possible to perform several tests on the same parser `<name>`, by placing known inputs `*.actual` into a
directory `testdata/<name>/`.
+
+### Running the tests
+
+Run the tests as normal:
```shell
go test ./lexers
```
+
+### Update existing tests
+
+When you add a new test data file (`*.actual`), you need to regenerate all tests. That's how Chroma creates the `*.expected` test file based on the corresponding lexer.
+
+To regenerate all tests, type in your terminal:
+
```shell
RECORD=true go test ./lexers
```
+
+This first sets the `RECORD` environment variable to `true`. Then it runs `go test` on the `./lexers` directory of the Chroma project.
+
+(That environment variable tells Chroma it needs to output test data. After running `go test ./lexers` you can remove or reset that variable.)
+
+#### Windows users
+
+Windows users will find that the `RECORD=true go test ./lexers` command fails in both the standard command prompt terminal and in PowerShell.
+
+Instead we have to perform both steps separately:
+
+- Set the `RECORD` environment variable to `true`.
+ + In the regular command prompt window, the `set` command sets an environment variable for the current session: `set RECORD=true`. See [this page](https://superuser.com/questions/212150/how-to-set-env-variable-in-windows-cmd-line) for more.
+ + In PowerShell, you can use the `$env:RECORD = 'true'` command for that. See [this article](https://mcpmag.com/articles/2019/03/28/environment-variables-in-powershell.aspx) for more.
+ + You can also make a persistent environment variable by hand in the Windows computer settings. See [this article](https://www.computerhope.com/issues/ch000549.htm) for how.
+- When the environment variable is set, run `go test ./lexers`.
+
+Chroma will now regenerate the test files and print its results to the console window.
@@ -0,0 +1,275 @@
+package lexers
+
+import (
+ . "github.com/alecthomas/chroma/v2" // nolint
+)
+
+// Matcher token stub for docs, or
+// Named matcher: @name, or
+// Path matcher: /foo, or
+// Wildcard path matcher: *
+// nolint: gosec
+var caddyfileMatcherTokenRegexp = `(\[\<matcher\>\]|@[^\s]+|/[^\s]+|\*)`
+
+// Comment at start of line, or
+// Comment preceded by whitespace
+var caddyfileCommentRegexp = `(^|\s+)#.*\n`
+
+// caddyfileCommon are the rules common to both of the lexer variants
+func caddyfileCommonRules() Rules {
+ return Rules{
+ "site_block_common": {
+ Include("site_body"),
+ // Any other directive
+ {`[^\s#]+`, Keyword, Push("directive")},
+ Include("base"),
+ },
+ "site_body": {
+ // Import keyword
+ {`\b(import|invoke)\b( [^\s#]+)`, ByGroups(Keyword, Text), Push("subdirective")},
+ // Matcher definition
+ {`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
+ // Matcher token stub for docs
+ {`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
+ // These cannot have matchers but may have things that look like
+ // matchers in their arguments, so we just parse as a subdirective.
+ {`\b(try_files|tls|log|bind)\b`, Keyword, Push("subdirective")},
+ // These are special, they can nest more directives
+ {`\b(handle_errors|handle_path|handle_response|replace_status|handle|route)\b`, Keyword, Push("nested_directive")},
+ // uri directive has special syntax
+ {`\b(uri)\b`, Keyword, Push("uri_directive")},
+ },
+ "matcher": {
+ {`\{`, Punctuation, Push("block")},
+ // Not can be one-liner
+ {`not`, Keyword, Push("deep_not_matcher")},
+ // Heredoc for CEL expression
+ Include("heredoc"),
+ // Backtick for CEL expression
+ {"`", StringBacktick, Push("backticks")},
+ // Any other same-line matcher
+ {`[^\s#]+`, Keyword, Push("arguments")},
+ // Terminators
+ {`\s*\n`, Text, Pop(1)},
+ {`\}`, Punctuation, Pop(1)},
+ Include("base"),
+ },
+ "block": {
+ {`\}`, Punctuation, Pop(2)},
+ // Using double quotes doesn't stop at spaces
+ {`"`, StringDouble, Push("double_quotes")},
+ // Using backticks doesn't stop at spaces
+ {"`", StringBacktick, Push("backticks")},
+ // Not can be one-liner
+ {`not`, Keyword, Push("not_matcher")},
+ // Directives & matcher definitions
+ Include("site_body"),
+ // Any directive
+ {`[^\s#]+`, Keyword, Push("subdirective")},
+ Include("base"),
+ },
+ "nested_block": {
+ {`\}`, Punctuation, Pop(2)},
+ // Using double quotes doesn't stop at spaces
+ {`"`, StringDouble, Push("double_quotes")},
+ // Using backticks doesn't stop at spaces
+ {"`", StringBacktick, Push("backticks")},
+ // Not can be one-liner
+ {`not`, Keyword, Push("not_matcher")},
+ // Directives & matcher definitions
+ Include("site_body"),
+ // Any other subdirective
+ {`[^\s#]+`, Keyword, Push("directive")},
+ Include("base"),
+ },
+ "not_matcher": {
+ {`\}`, Punctuation, Pop(2)},
+ {`\{(?=\s)`, Punctuation, Push("block")},
+ {`[^\s#]+`, Keyword, Push("arguments")},
+ {`\s+`, Text, nil},
+ },
+ "deep_not_matcher": {
+ {`\}`, Punctuation, Pop(2)},
+ {`\{(?=\s)`, Punctuation, Push("block")},
+ {`[^\s#]+`, Keyword, Push("deep_subdirective")},
+ {`\s+`, Text, nil},
+ },
+ "directive": {
+ {`\{(?=\s)`, Punctuation, Push("block")},
+ {caddyfileMatcherTokenRegexp, NameDecorator, Push("arguments")},
+ {caddyfileCommentRegexp, CommentSingle, Pop(1)},
+ {`\s*\n`, Text, Pop(1)},
+ Include("base"),
+ },
+ "nested_directive": {
+ {`\{(?=\s)`, Punctuation, Push("nested_block")},
+ {caddyfileMatcherTokenRegexp, NameDecorator, Push("nested_arguments")},
+ {caddyfileCommentRegexp, CommentSingle, Pop(1)},
+ {`\s*\n`, Text, Pop(1)},
+ Include("base"),
+ },
+ "subdirective": {
+ {`\{(?=\s)`, Punctuation, Push("block")},
+ {caddyfileCommentRegexp, CommentSingle, Pop(1)},
+ {`\s*\n`, Text, Pop(1)},
+ Include("base"),
+ },
+ "arguments": {
+ {`\{(?=\s)`, Punctuation, Push("block")},
+ {caddyfileCommentRegexp, CommentSingle, Pop(2)},
+ {`\\\n`, Text, nil}, // Skip escaped newlines
+ {`\s*\n`, Text, Pop(2)},
+ Include("base"),
+ },
+ "nested_arguments": {
+ {`\{(?=\s)`, Punctuation, Push("nested_block")},
+ {caddyfileCommentRegexp, CommentSingle, Pop(2)},
+ {`\\\n`, Text, nil}, // Skip escaped newlines
+ {`\s*\n`, Text, Pop(2)},
+ Include("base"),
+ },
+ "deep_subdirective": {
+ {`\{(?=\s)`, Punctuation, Push("block")},
+ {caddyfileCommentRegexp, CommentSingle, Pop(3)},
+ {`\s*\n`, Text, Pop(3)},
+ Include("base"),
+ },
+ "uri_directive": {
+ {`\{(?=\s)`, Punctuation, Push("block")},
+ {caddyfileMatcherTokenRegexp, NameDecorator, nil},
+ {`(strip_prefix|strip_suffix|replace|path_regexp)`, NameConstant, Push("arguments")},
+ {caddyfileCommentRegexp, CommentSingle, Pop(1)},
+ {`\s*\n`, Text, Pop(1)},
+ Include("base"),
+ },
+ "double_quotes": {
+ Include("placeholder"),
+ {`\\"`, StringDouble, nil},
+ {`[^"]`, StringDouble, nil},
+ {`"`, StringDouble, Pop(1)},
+ },
+ "backticks": {
+ Include("placeholder"),
+ {"\\\\`", StringBacktick, nil},
+ {"[^`]", StringBacktick, nil},
+ {"`", StringBacktick, Pop(1)},
+ },
+ "optional": {
+ // Docs syntax for showing optional parts with [ ]
+ {`\[`, Punctuation, Push("optional")},
+ Include("name_constants"),
+ {`\|`, Punctuation, nil},
+ {`[^\[\]\|]+`, String, nil},
+ {`\]`, Punctuation, Pop(1)},
+ },
+ "heredoc": {
+ {`(<<([a-zA-Z0-9_-]+))(\n(.*|\n)*)(\s*)(\2)`, ByGroups(StringHeredoc, nil, String, String, String, StringHeredoc), nil},
+ },
+ "name_constants": {
+ {`\b(most_recently_modified|largest_size|smallest_size|first_exist|internal|disable_redirects|ignore_loaded_certs|disable_certs|private_ranges|first|last|before|after|on|off)\b(\||(?=\]|\s|$))`, ByGroups(NameConstant, Punctuation), nil},
+ },
+ "placeholder": {
+ // Placeholder with dots, colon for default value, brackets for args[0:]
+ {`\{[\w+.\[\]\:\$-]+\}`, StringEscape, nil},
+ // Handle opening brackets with no matching closing one
+ {`\{[^\}\s]*\b`, String, nil},
+ },
+ "base": {
+ {caddyfileCommentRegexp, CommentSingle, nil},
+ {`\[\<matcher\>\]`, NameDecorator, nil},
+ Include("name_constants"),
+ Include("heredoc"),
+ {`(https?://)?([a-z0-9.-]+)(:)([0-9]+)([^\s]*)`, ByGroups(Name, Name, Punctuation, NumberInteger, Name), nil},
+ {`\[`, Punctuation, Push("optional")},
+ {"`", StringBacktick, Push("backticks")},
+ {`"`, StringDouble, Push("double_quotes")},
+ Include("placeholder"),
+ {`[a-z-]+/[a-z-+]+`, String, nil},
+ {`[0-9]+([smhdk]|ns|us|µs|ms)?\b`, NumberInteger, nil},
+ {`[^\s\n#\{]+`, String, nil},
+ {`/[^\s#]*`, Name, nil},
+ {`\s+`, Text, nil},
+ },
+ }
+}
+
+// Caddyfile lexer.
+var Caddyfile = Register(MustNewLexer(
+ &Config{
+ Name: "Caddyfile",
+ Aliases: []string{"caddyfile", "caddy"},
+ Filenames: []string{"Caddyfile*"},
+ MimeTypes: []string{},
+ },
+ caddyfileRules,
+))
+
+func caddyfileRules() Rules {
+ return Rules{
+ "root": {
+ {caddyfileCommentRegexp, CommentSingle, nil},
+ // Global options block
+ {`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
+ // Top level import
+ {`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
+ // Snippets
+ {`(&?\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
+ // Site label
+ {`[^#{(\s,]+`, GenericHeading, Push("label")},
+ // Site label with placeholder
+ {`\{[\w+.\[\]\:\$-]+\}`, StringEscape, Push("label")},
+ {`\s+`, Text, nil},
+ },
+ "globals": {
+ {`\}`, Punctuation, Pop(1)},
+ // Global options are parsed as subdirectives (no matcher)
+ {`[^\s#]+`, Keyword, Push("subdirective")},
+ Include("base"),
+ },
+ "snippet": {
+ {`\}`, Punctuation, Pop(1)},
+ Include("site_body"),
+ // Any other directive
+ {`[^\s#]+`, Keyword, Push("directive")},
+ Include("base"),
+ },
+ "label": {
+ // Allow multiple labels, comma separated, newlines after
+ // a comma means another label is coming
+ {`,\s*\n?`, Text, nil},
+ {` `, Text, nil},
+ // Site label with placeholder
+ Include("placeholder"),
+ // Site label
+ {`[^#{(\s,]+`, GenericHeading, nil},
+ // Comment after non-block label (hack because comments end in \n)
+ {`#.*\n`, CommentSingle, Push("site_block")},
+ // Note: if \n, we'll never pop out of the site_block, it's valid
+ {`\{(?=\s)|\n`, Punctuation, Push("site_block")},
+ },
+ "site_block": {
+ {`\}`, Punctuation, Pop(2)},
+ Include("site_block_common"),
+ },
+ }.Merge(caddyfileCommonRules())
+}
+
+// Caddyfile directive-only lexer.
+var CaddyfileDirectives = Register(MustNewLexer(
+ &Config{
+ Name: "Caddyfile Directives",
+ Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
+ Filenames: []string{},
+ MimeTypes: []string{},
+ },
+ caddyfileDirectivesRules,
+))
+
+func caddyfileDirectivesRules() Rules {
+ return Rules{
+ // Same as "site_block" in Caddyfile
+ "root": {
+ Include("site_block_common"),
+ },
+ }.Merge(caddyfileCommonRules())
+}
@@ -0,0 +1,243 @@
+package lexers
+
+import (
+ . "github.com/alecthomas/chroma/v2" // nolint
+)
+
+var (
+ clBuiltinFunctions = []string{
+ "<", "<=", "=", ">", ">=", "-", "/", "/=", "*", "+", "1-", "1+",
+ "abort", "abs", "acons", "acos", "acosh", "add-method", "adjoin",
+ "adjustable-array-p", "adjust-array", "allocate-instance",
+ "alpha-char-p", "alphanumericp", "append", "apply", "apropos",
+ "apropos-list", "aref", "arithmetic-error-operands",
+ "arithmetic-error-operation", "array-dimension", "array-dimensions",
+ "array-displacement", "array-element-type", "array-has-fill-pointer-p",
+ "array-in-bounds-p", "arrayp", "array-rank", "array-row-major-index",
+ "array-total-size", "ash", "asin", "asinh", "assoc", "assoc-if",
+ "assoc-if-not", "atan", "atanh", "atom", "bit", "bit-and", "bit-andc1",
+ "bit-andc2", "bit-eqv", "bit-ior", "bit-nand", "bit-nor", "bit-not",
+ "bit-orc1", "bit-orc2", "bit-vector-p", "bit-xor", "boole",
+ "both-case-p", "boundp", "break", "broadcast-stream-streams",
+ "butlast", "byte", "byte-position", "byte-size", "caaaar", "caaadr",
+ "caaar", "caadar", "caaddr", "caadr", "caar", "cadaar", "cadadr",
+ "cadar", "caddar", "cadddr", "caddr", "cadr", "call-next-method", "car",
+ "cdaaar", "cdaadr", "cdaar", "cdadar", "cdaddr", "cdadr", "cdar",
+ "cddaar", "cddadr", "cddar", "cdddar", "cddddr", "cdddr", "cddr", "cdr",
+ "ceiling", "cell-error-name", "cerror", "change-class", "char", "char<",
+ "char<=", "char=", "char>", "char>=", "char/=", "character",
+ "characterp", "char-code", "char-downcase", "char-equal",
+ "char-greaterp", "char-int", "char-lessp", "char-name",
+ "char-not-equal", "char-not-greaterp", "char-not-lessp", "char-upcase",
+ "cis", "class-name", "class-of", "clear-input", "clear-output",
+ "close", "clrhash", "code-char", "coerce", "compile",
+ "compiled-function-p", "compile-file", "compile-file-pathname",
+ "compiler-macro-function", "complement", "complex", "complexp",
+ "compute-applicable-methods", "compute-restarts", "concatenate",
+ "concatenated-stream-streams", "conjugate", "cons", "consp",
+ "constantly", "constantp", "continue", "copy-alist", "copy-list",
+ "copy-pprint-dispatch", "copy-readtable", "copy-seq", "copy-structure",
+ "copy-symbol", "copy-tree", "cos", "cosh", "count", "count-if",
+ "count-if-not", "decode-float", "decode-universal-time", "delete",
+ "delete-duplicates", "delete-file", "delete-if", "delete-if-not",
+ "delete-package", "denominator", "deposit-field", "describe",
+ "describe-object", "digit-char", "digit-char-p", "directory",
+ "directory-namestring", "disassemble", "documentation", "dpb",
+ "dribble", "echo-stream-input-stream", "echo-stream-output-stream",
+ "ed", "eighth", "elt", "encode-universal-time", "endp",
+ "enough-namestring", "ensure-directories-exist",
+ "ensure-generic-function", "eq", "eql", "equal", "equalp", "error",
+ "eval", "evenp", "every", "exp", "export", "expt", "fboundp",
+ "fceiling", "fdefinition", "ffloor", "fifth", "file-author",
+ "file-error-pathname", "file-length", "file-namestring",
+ "file-position", "file-string-length", "file-write-date",
+ "fill", "fill-pointer", "find", "find-all-symbols", "find-class",
+ "find-if", "find-if-not", "find-method", "find-package", "find-restart",
+ "find-symbol", "finish-output", "first", "float", "float-digits",
+ "floatp", "float-precision", "float-radix", "float-sign", "floor",
+ "fmakunbound", "force-output", "format", "fourth", "fresh-line",
+ "fround", "ftruncate", "funcall", "function-keywords",
+ "function-lambda-expression", "functionp", "gcd", "gensym", "gentemp",
+ "get", "get-decoded-time", "get-dispatch-macro-character", "getf",
+ "gethash", "get-internal-real-time", "get-internal-run-time",
+ "get-macro-character", "get-output-stream-string", "get-properties",
+ "get-setf-expansion", "get-universal-time", "graphic-char-p",
+ "hash-table-count", "hash-table-p", "hash-table-rehash-size",
+ "hash-table-rehash-threshold", "hash-table-size", "hash-table-test",
+ "host-namestring", "identity", "imagpart", "import",
+ "initialize-instance", "input-stream-p", "inspect",
+ "integer-decode-float", "integer-length", "integerp",
+ "interactive-stream-p", "intern", "intersection",
+ "invalid-method-error", "invoke-debugger", "invoke-restart",
+ "invoke-restart-interactively", "isqrt", "keywordp", "last", "lcm",
+ "ldb", "ldb-test", "ldiff", "length", "lisp-implementation-type",
+ "lisp-implementation-version", "list", "list*", "list-all-packages",
+ "listen", "list-length", "listp", "load",
+ "load-logical-pathname-translations", "log", "logand", "logandc1",
+ "logandc2", "logbitp", "logcount", "logeqv", "logical-pathname",
+ "logical-pathname-translations", "logior", "lognand", "lognor",
+ "lognot", "logorc1", "logorc2", "logtest", "logxor", "long-site-name",
+ "lower-case-p", "machine-instance", "machine-type", "machine-version",
+ "macroexpand", "macroexpand-1", "macro-function", "make-array",
+ "make-broadcast-stream", "make-concatenated-stream", "make-condition",
+ "make-dispatch-macro-character", "make-echo-stream", "make-hash-table",
+ "make-instance", "make-instances-obsolete", "make-list",
+ "make-load-form", "make-load-form-saving-slots", "make-package",
+ "make-pathname", "make-random-state", "make-sequence", "make-string",
+ "make-string-input-stream", "make-string-output-stream", "make-symbol",
+ "make-synonym-stream", "make-two-way-stream", "makunbound", "map",
+ "mapc", "mapcan", "mapcar", "mapcon", "maphash", "map-into", "mapl",
+ "maplist", "mask-field", "max", "member", "member-if", "member-if-not",
+ "merge", "merge-pathnames", "method-combination-error",
+ "method-qualifiers", "min", "minusp", "mismatch", "mod",
+ "muffle-warning", "name-char", "namestring", "nbutlast", "nconc",
+ "next-method-p", "nintersection", "ninth", "no-applicable-method",
+ "no-next-method", "not", "notany", "notevery", "nreconc", "nreverse",
+ "nset-difference", "nset-exclusive-or", "nstring-capitalize",
+ "nstring-downcase", "nstring-upcase", "nsublis", "nsubst", "nsubst-if",
+ "nsubst-if-not", "nsubstitute", "nsubstitute-if", "nsubstitute-if-not",
+ "nth", "nthcdr", "null", "numberp", "numerator", "nunion", "oddp",
+ "open", "open-stream-p", "output-stream-p", "package-error-package",
+ "package-name", "package-nicknames", "packagep",
+ "package-shadowing-symbols", "package-used-by-list", "package-use-list",
+ "pairlis", "parse-integer", "parse-namestring", "pathname",
+ "pathname-device", "pathname-directory", "pathname-host",
+ "pathname-match-p", "pathname-name", "pathnamep", "pathname-type",
+ "pathname-version", "peek-char", "phase", "plusp", "position",
+ "position-if", "position-if-not", "pprint", "pprint-dispatch",
+ "pprint-fill", "pprint-indent", "pprint-linear", "pprint-newline",
+ "pprint-tab", "pprint-tabular", "prin1", "prin1-to-string", "princ",
+ "princ-to-string", "print", "print-object", "probe-file", "proclaim",
+ "provide", "random", "random-state-p", "rassoc", "rassoc-if",
+ "rassoc-if-not", "rational", "rationalize", "rationalp", "read",
+ "read-byte", "read-char", "read-char-no-hang", "read-delimited-list",
+ "read-from-string", "read-line", "read-preserving-whitespace",
+ "read-sequence", "readtable-case", "readtablep", "realp", "realpart",
+ "reduce", "reinitialize-instance", "rem", "remhash", "remove",
+ "remove-duplicates", "remove-if", "remove-if-not", "remove-method",
+ "remprop", "rename-file", "rename-package", "replace", "require",
+ "rest", "restart-name", "revappend", "reverse", "room", "round",
+ "row-major-aref", "rplaca", "rplacd", "sbit", "scale-float", "schar",
+ "search", "second", "set", "set-difference",
+ "set-dispatch-macro-character", "set-exclusive-or",
+ "set-macro-character", "set-pprint-dispatch", "set-syntax-from-char",
+ "seventh", "shadow", "shadowing-import", "shared-initialize",
+ "short-site-name", "signal", "signum", "simple-bit-vector-p",
+ "simple-condition-format-arguments", "simple-condition-format-control",
+ "simple-string-p", "simple-vector-p", "sin", "sinh", "sixth", "sleep",
+ "slot-boundp", "slot-exists-p", "slot-makunbound", "slot-missing",
+ "slot-unbound", "slot-value", "software-type", "software-version",
+ "some", "sort", "special-operator-p", "sqrt", "stable-sort",
+ "standard-char-p", "store-value", "stream-element-type",
+ "stream-error-stream", "stream-external-format", "streamp", "string",
+ "string<", "string<=", "string=", "string>", "string>=", "string/=",
+ "string-capitalize", "string-downcase", "string-equal",
+ "string-greaterp", "string-left-trim", "string-lessp",
+ "string-not-equal", "string-not-greaterp", "string-not-lessp",
+ "stringp", "string-right-trim", "string-trim", "string-upcase",
+ "sublis", "subseq", "subsetp", "subst", "subst-if", "subst-if-not",
+ "substitute", "substitute-if", "substitute-if-not", "subtypep", "svref",
+ "sxhash", "symbol-function", "symbol-name", "symbolp", "symbol-package",
+ "symbol-plist", "symbol-value", "synonym-stream-symbol", "syntax:",
+ "tailp", "tan", "tanh", "tenth", "terpri", "third",
+ "translate-logical-pathname", "translate-pathname", "tree-equal",
+ "truename", "truncate", "two-way-stream-input-stream",
+ "two-way-stream-output-stream", "type-error-datum",
+ "type-error-expected-type", "type-of", "typep", "unbound-slot-instance",
+ "unexport", "unintern", "union", "unread-char", "unuse-package",
+ "update-instance-for-different-class",
+ "update-instance-for-redefined-class", "upgraded-array-element-type",
+ "upgraded-complex-part-type", "upper-case-p", "use-package",
+ "user-homedir-pathname", "use-value", "values", "values-list", "vector",
+ "vectorp", "vector-pop", "vector-push", "vector-push-extend", "warn",
+ "wild-pathname-p", "write", "write-byte", "write-char", "write-line",
+ "write-sequence", "write-string", "write-to-string", "yes-or-no-p",
+ "y-or-n-p", "zerop",
+ }
+
+ clSpecialForms = []string{
+ "block", "catch", "declare", "eval-when", "flet", "function", "go", "if",
+ "labels", "lambda", "let", "let*", "load-time-value", "locally", "macrolet",
+ "multiple-value-call", "multiple-value-prog1", "progn", "progv", "quote",
+ "return-from", "setq", "symbol-macrolet", "tagbody", "the", "throw",
+ "unwind-protect",
+ }
+
+ clMacros = []string{
+ "and", "assert", "call-method", "case", "ccase", "check-type", "cond",
+ "ctypecase", "decf", "declaim", "defclass", "defconstant", "defgeneric",
+ "define-compiler-macro", "define-condition", "define-method-combination",
+ "define-modify-macro", "define-setf-expander", "define-symbol-macro",
+ "defmacro", "defmethod", "defpackage", "defparameter", "defsetf",
+ "defstruct", "deftype", "defun", "defvar", "destructuring-bind", "do",
+ "do*", "do-all-symbols", "do-external-symbols", "dolist", "do-symbols",
+ "dotimes", "ecase", "etypecase", "formatter", "handler-bind",
+ "handler-case", "ignore-errors", "incf", "in-package", "lambda", "loop",
+ "loop-finish", "make-method", "multiple-value-bind", "multiple-value-list",
+ "multiple-value-setq", "nth-value", "or", "pop",
+ "pprint-exit-if-list-exhausted", "pprint-logical-block", "pprint-pop",
+ "print-unreadable-object", "prog", "prog*", "prog1", "prog2", "psetf",
+ "psetq", "push", "pushnew", "remf", "restart-bind", "restart-case",
+ "return", "rotatef", "setf", "shiftf", "step", "time", "trace", "typecase",
+ "unless", "untrace", "when", "with-accessors", "with-compilation-unit",
+ "with-condition-restarts", "with-hash-table-iterator",
+ "with-input-from-string", "with-open-file", "with-open-stream",
+ "with-output-to-string", "with-package-iterator", "with-simple-restart",
+ "with-slots", "with-standard-io-syntax",
+ }
+
+ clLambdaListKeywords = []string{
+ "&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional",
+ "&rest", "&whole",
+ }
+
+ clDeclarations = []string{
+ "dynamic-extent", "ignore", "optimize", "ftype", "inline", "special",
+ "ignorable", "notinline", "type",
+ }
+
+ clBuiltinTypes = []string{
+ "atom", "boolean", "base-char", "base-string", "bignum", "bit",
+ "compiled-function", "extended-char", "fixnum", "keyword", "nil",
+ "signed-byte", "short-float", "single-float", "double-float", "long-float",
+ "simple-array", "simple-base-string", "simple-bit-vector", "simple-string",
+ "simple-vector", "standard-char", "unsigned-byte",
+
+ // Condition Types
+ "arithmetic-error", "cell-error", "condition", "control-error",
+ "division-by-zero", "end-of-file", "error", "file-error",
+ "floating-point-inexact", "floating-point-overflow",
+ "floating-point-underflow", "floating-point-invalid-operation",
+ "parse-error", "package-error", "print-not-readable", "program-error",
+ "reader-error", "serious-condition", "simple-condition", "simple-error",
+ "simple-type-error", "simple-warning", "stream-error", "storage-condition",
+ "style-warning", "type-error", "unbound-variable", "unbound-slot",
+ "undefined-function", "warning",
+ }
+
+ clBuiltinClasses = []string{
+ "array", "broadcast-stream", "bit-vector", "built-in-class", "character",
+ "class", "complex", "concatenated-stream", "cons", "echo-stream",
+ "file-stream", "float", "function", "generic-function", "hash-table",
+ "integer", "list", "logical-pathname", "method-combination", "method",
+ "null", "number", "package", "pathname", "ratio", "rational", "readtable",
+ "real", "random-state", "restart", "sequence", "standard-class",
+ "standard-generic-function", "standard-method", "standard-object",
+ "string-stream", "stream", "string", "structure-class", "structure-object",
+ "symbol", "synonym-stream", "t", "two-way-stream", "vector",
+ }
+)
+
+// Common Lisp lexer.
+var CommonLisp = Register(TypeRemappingLexer(MustNewXMLLexer(
+ embedded,
+ "embedded/common_lisp.xml",
+), TypeMapping{
+ {NameVariable, NameFunction, clBuiltinFunctions},
+ {NameVariable, Keyword, clSpecialForms},
+ {NameVariable, NameBuiltin, clMacros},
+ {NameVariable, Keyword, clLambdaListKeywords},
+ {NameVariable, Keyword, clDeclarations},
+ {NameVariable, KeywordType, clBuiltinTypes},
+ {NameVariable, NameClass, clBuiltinClasses},
+}))
@@ -0,0 +1,17 @@
+package lexers
+
+import (
+ "regexp"
+)
+
+// TODO(moorereason): can this be factored away?
+var zoneAnalyserRe = regexp.MustCompile(`(?m)^@\s+IN\s+SOA\s+`)
+
+func init() { // nolint: gochecknoinits
+ Get("dns").SetAnalyser(func(text string) float32 {
+ if zoneAnalyserRe.FindString(text) != "" {
+ return 1.0
+ }
+ return 0.0
+ })
+}
@@ -0,0 +1,533 @@
+package lexers
+
+import (
+ . "github.com/alecthomas/chroma/v2" // nolint
+)
+
+var (
+	// emacsMacros lists Emacs Lisp and cl-lib macro names; the
+	// EmacsLisp lexer below remaps matching NameVariable tokens
+	// (together with emacsBuiltinFunctionHighlighted) to NameBuiltin.
+	emacsMacros = []string{
+		"atomic-change-group", "case", "block", "cl-block", "cl-callf", "cl-callf2",
+		"cl-case", "cl-decf", "cl-declaim", "cl-declare",
+		"cl-define-compiler-macro", "cl-defmacro", "cl-defstruct",
+		"cl-defsubst", "cl-deftype", "cl-defun", "cl-destructuring-bind",
+		"cl-do", "cl-do*", "cl-do-all-symbols", "cl-do-symbols", "cl-dolist",
+		"cl-dotimes", "cl-ecase", "cl-etypecase", "eval-when", "cl-eval-when", "cl-flet",
+		"cl-flet*", "cl-function", "cl-incf", "cl-labels", "cl-letf",
+		"cl-letf*", "cl-load-time-value", "cl-locally", "cl-loop",
+		"cl-macrolet", "cl-multiple-value-bind", "cl-multiple-value-setq",
+		"cl-progv", "cl-psetf", "cl-psetq", "cl-pushnew", "cl-remf",
+		"cl-return", "cl-return-from", "cl-rotatef", "cl-shiftf",
+		"cl-symbol-macrolet", "cl-tagbody", "cl-the", "cl-typecase",
+		"combine-after-change-calls", "condition-case-unless-debug", "decf",
+		"declaim", "declare", "declare-function", "def-edebug-spec",
+		"defadvice", "defclass", "defcustom", "defface", "defgeneric",
+		"defgroup", "define-advice", "define-alternatives",
+		"define-compiler-macro", "define-derived-mode", "define-generic-mode",
+		"define-global-minor-mode", "define-globalized-minor-mode",
+		"define-minor-mode", "define-modify-macro",
+		"define-obsolete-face-alias", "define-obsolete-function-alias",
+		"define-obsolete-variable-alias", "define-setf-expander",
+		"define-skeleton", "defmacro", "defmethod", "defsetf", "defstruct",
+		"defsubst", "deftheme", "deftype", "defun", "defvar-local",
+		"delay-mode-hooks", "destructuring-bind", "do", "do*",
+		"do-all-symbols", "do-symbols", "dolist", "dont-compile", "dotimes",
+		"dotimes-with-progress-reporter", "ecase", "ert-deftest", "etypecase",
+		"eval-and-compile", "eval-when-compile", "flet", "ignore-errors",
+		"incf", "labels", "lambda", "letrec", "lexical-let", "lexical-let*",
+		"loop", "multiple-value-bind", "multiple-value-setq", "noreturn",
+		"oref", "oref-default", "oset", "oset-default", "pcase",
+		"pcase-defmacro", "pcase-dolist", "pcase-exhaustive", "pcase-let",
+		"pcase-let*", "pop", "psetf", "psetq", "push", "pushnew", "remf",
+		"return", "rotatef", "rx", "save-match-data", "save-selected-window",
+		"save-window-excursion", "setf", "setq-local", "shiftf",
+		"track-mouse", "typecase", "unless", "use-package", "when",
+		"while-no-input", "with-case-table", "with-category-table",
+		"with-coding-priority", "with-current-buffer", "with-demoted-errors",
+		"with-eval-after-load", "with-file-modes", "with-local-quit",
+		"with-output-to-string", "with-output-to-temp-buffer",
+		"with-parsed-tramp-file-name", "with-selected-frame",
+		"with-selected-window", "with-silent-modifications", "with-slots",
+		"with-syntax-table", "with-temp-buffer", "with-temp-file",
+		"with-temp-message", "with-timeout", "with-tramp-connection-property",
+		"with-tramp-file-property", "with-tramp-progress-reporter",
+		"with-wrapper-hook", "load-time-value", "locally", "macrolet", "progv",
+		"return-from",
+	}
+
+	// emacsSpecialForms lists Emacs Lisp special-form names; the
+	// EmacsLisp lexer below remaps matching tokens to NameBuiltin.
+	emacsSpecialForms = []string{
+		"and", "catch", "cond", "condition-case", "defconst", "defvar",
+		"function", "if", "interactive", "let", "let*", "or", "prog1",
+		"prog2", "progn", "quote", "save-current-buffer", "save-excursion",
+		"save-restriction", "setq", "setq-default", "subr-arity",
+		"unwind-protect", "while",
+	}
+
+	// emacsBuiltinFunction lists Emacs built-in function names; the
+	// EmacsLisp lexer below remaps matching tokens to NameFunction.
+	// NOTE(review): the list contains duplicate entries (e.g.
+	// "file-system-info", "menu-or-popup-active-p", and several "x-..."
+	// names appear twice); harmless for membership lookup, but could be
+	// deduplicated.
+	emacsBuiltinFunction = []string{
+		"%", "*", "+", "-", "/", "/=", "1+", "1-", "<", "<=", "=", ">", ">=",
+		"Snarf-documentation", "abort-recursive-edit", "abs",
+		"accept-process-output", "access-file", "accessible-keymaps", "acos",
+		"active-minibuffer-window", "add-face-text-property",
+		"add-name-to-file", "add-text-properties", "all-completions",
+		"append", "apply", "apropos-internal", "aref", "arrayp", "aset",
+		"ash", "asin", "assoc", "assoc-string", "assq", "atan", "atom",
+		"autoload", "autoload-do-load", "backtrace", "backtrace--locals",
+		"backtrace-debug", "backtrace-eval", "backtrace-frame",
+		"backward-char", "backward-prefix-chars", "barf-if-buffer-read-only",
+		"base64-decode-region", "base64-decode-string",
+		"base64-encode-region", "base64-encode-string", "beginning-of-line",
+		"bidi-find-overridden-directionality", "bidi-resolved-levels",
+		"bitmap-spec-p", "bobp", "bolp", "bool-vector",
+		"bool-vector-count-consecutive", "bool-vector-count-population",
+		"bool-vector-exclusive-or", "bool-vector-intersection",
+		"bool-vector-not", "bool-vector-p", "bool-vector-set-difference",
+		"bool-vector-subsetp", "bool-vector-union", "boundp",
+		"buffer-base-buffer", "buffer-chars-modified-tick",
+		"buffer-enable-undo", "buffer-file-name", "buffer-has-markers-at",
+		"buffer-list", "buffer-live-p", "buffer-local-value",
+		"buffer-local-variables", "buffer-modified-p", "buffer-modified-tick",
+		"buffer-name", "buffer-size", "buffer-string", "buffer-substring",
+		"buffer-substring-no-properties", "buffer-swap-text", "bufferp",
+		"bury-buffer-internal", "byte-code", "byte-code-function-p",
+		"byte-to-position", "byte-to-string", "byteorder",
+		"call-interactively", "call-last-kbd-macro", "call-process",
+		"call-process-region", "cancel-kbd-macro-events", "capitalize",
+		"capitalize-region", "capitalize-word", "car", "car-less-than-car",
+		"car-safe", "case-table-p", "category-docstring",
+		"category-set-mnemonics", "category-table", "category-table-p",
+		"ccl-execute", "ccl-execute-on-string", "ccl-program-p", "cdr",
+		"cdr-safe", "ceiling", "char-after", "char-before",
+		"char-category-set", "char-charset", "char-equal", "char-or-string-p",
+		"char-resolve-modifiers", "char-syntax", "char-table-extra-slot",
+		"char-table-p", "char-table-parent", "char-table-range",
+		"char-table-subtype", "char-to-string", "char-width", "characterp",
+		"charset-after", "charset-id-internal", "charset-plist",
+		"charset-priority-list", "charsetp", "check-coding-system",
+		"check-coding-systems-region", "clear-buffer-auto-save-failure",
+		"clear-charset-maps", "clear-face-cache", "clear-font-cache",
+		"clear-image-cache", "clear-string", "clear-this-command-keys",
+		"close-font", "clrhash", "coding-system-aliases",
+		"coding-system-base", "coding-system-eol-type", "coding-system-p",
+		"coding-system-plist", "coding-system-priority-list",
+		"coding-system-put", "color-distance", "color-gray-p",
+		"color-supported-p", "combine-after-change-execute",
+		"command-error-default-function", "command-remapping", "commandp",
+		"compare-buffer-substrings", "compare-strings",
+		"compare-window-configurations", "completing-read",
+		"compose-region-internal", "compose-string-internal",
+		"composition-get-gstring", "compute-motion", "concat", "cons",
+		"consp", "constrain-to-field", "continue-process",
+		"controlling-tty-p", "coordinates-in-window-p", "copy-alist",
+		"copy-category-table", "copy-file", "copy-hash-table", "copy-keymap",
+		"copy-marker", "copy-sequence", "copy-syntax-table", "copysign",
+		"cos", "current-active-maps", "current-bidi-paragraph-direction",
+		"current-buffer", "current-case-table", "current-column",
+		"current-global-map", "current-idle-time", "current-indentation",
+		"current-input-mode", "current-local-map", "current-message",
+		"current-minor-mode-maps", "current-time", "current-time-string",
+		"current-time-zone", "current-window-configuration",
+		"cygwin-convert-file-name-from-windows",
+		"cygwin-convert-file-name-to-windows", "daemon-initialized",
+		"daemonp", "dbus--init-bus", "dbus-get-unique-name",
+		"dbus-message-internal", "debug-timer-check", "declare-equiv-charset",
+		"decode-big5-char", "decode-char", "decode-coding-region",
+		"decode-coding-string", "decode-sjis-char", "decode-time",
+		"default-boundp", "default-file-modes", "default-printer-name",
+		"default-toplevel-value", "default-value", "define-category",
+		"define-charset-alias", "define-charset-internal",
+		"define-coding-system-alias", "define-coding-system-internal",
+		"define-fringe-bitmap", "define-hash-table-test", "define-key",
+		"define-prefix-command", "delete",
+		"delete-all-overlays", "delete-and-extract-region", "delete-char",
+		"delete-directory-internal", "delete-field", "delete-file",
+		"delete-frame", "delete-other-windows-internal", "delete-overlay",
+		"delete-process", "delete-region", "delete-terminal",
+		"delete-window-internal", "delq", "describe-buffer-bindings",
+		"describe-vector", "destroy-fringe-bitmap", "detect-coding-region",
+		"detect-coding-string", "ding", "directory-file-name",
+		"directory-files", "directory-files-and-attributes", "discard-input",
+		"display-supports-face-attributes-p", "do-auto-save", "documentation",
+		"documentation-property", "downcase", "downcase-region",
+		"downcase-word", "draw-string", "dump-colors", "dump-emacs",
+		"dump-face", "dump-frame-glyph-matrix", "dump-glyph-matrix",
+		"dump-glyph-row", "dump-redisplay-history", "dump-tool-bar-row",
+		"elt", "emacs-pid", "encode-big5-char", "encode-char",
+		"encode-coding-region", "encode-coding-string", "encode-sjis-char",
+		"encode-time", "end-kbd-macro", "end-of-line", "eobp", "eolp", "eq",
+		"eql", "equal", "equal-including-properties", "erase-buffer",
+		"error-message-string", "eval", "eval-buffer", "eval-region",
+		"event-convert-list", "execute-kbd-macro", "exit-recursive-edit",
+		"exp", "expand-file-name", "expt", "external-debugging-output",
+		"face-attribute-relative-p", "face-attributes-as-vector", "face-font",
+		"fboundp", "fceiling", "fetch-bytecode", "ffloor",
+		"field-beginning", "field-end", "field-string",
+		"field-string-no-properties", "file-accessible-directory-p",
+		"file-acl", "file-attributes", "file-attributes-lessp",
+		"file-directory-p", "file-executable-p", "file-exists-p",
+		"file-locked-p", "file-modes", "file-name-absolute-p",
+		"file-name-all-completions", "file-name-as-directory",
+		"file-name-completion", "file-name-directory",
+		"file-name-nondirectory", "file-newer-than-file-p", "file-readable-p",
+		"file-regular-p", "file-selinux-context", "file-symlink-p",
+		"file-system-info", "file-system-info", "file-writable-p",
+		"fillarray", "find-charset-region", "find-charset-string",
+		"find-coding-systems-region-internal", "find-composition-internal",
+		"find-file-name-handler", "find-font", "find-operation-coding-system",
+		"float", "float-time", "floatp", "floor", "fmakunbound",
+		"following-char", "font-at", "font-drive-otf", "font-face-attributes",
+		"font-family-list", "font-get", "font-get-glyphs",
+		"font-get-system-font", "font-get-system-normal-font", "font-info",
+		"font-match-p", "font-otf-alternates", "font-put",
+		"font-shape-gstring", "font-spec", "font-variation-glyphs",
+		"font-xlfd-name", "fontp", "fontset-font", "fontset-info",
+		"fontset-list", "fontset-list-all", "force-mode-line-update",
+		"force-window-update", "format", "format-mode-line",
+		"format-network-address", "format-time-string", "forward-char",
+		"forward-comment", "forward-line", "forward-word",
+		"frame-border-width", "frame-bottom-divider-width",
+		"frame-can-run-window-configuration-change-hook", "frame-char-height",
+		"frame-char-width", "frame-face-alist", "frame-first-window",
+		"frame-focus", "frame-font-cache", "frame-fringe-width", "frame-list",
+		"frame-live-p", "frame-or-buffer-changed-p", "frame-parameter",
+		"frame-parameters", "frame-pixel-height", "frame-pixel-width",
+		"frame-pointer-visible-p", "frame-right-divider-width",
+		"frame-root-window", "frame-scroll-bar-height",
+		"frame-scroll-bar-width", "frame-selected-window", "frame-terminal",
+		"frame-text-cols", "frame-text-height", "frame-text-lines",
+		"frame-text-width", "frame-total-cols", "frame-total-lines",
+		"frame-visible-p", "framep", "frexp", "fringe-bitmaps-at-pos",
+		"fround", "fset", "ftruncate", "funcall", "funcall-interactively",
+		"function-equal", "functionp", "gap-position", "gap-size",
+		"garbage-collect", "gc-status", "generate-new-buffer-name", "get",
+		"get-buffer", "get-buffer-create", "get-buffer-process",
+		"get-buffer-window", "get-byte", "get-char-property",
+		"get-char-property-and-overlay", "get-file-buffer", "get-file-char",
+		"get-internal-run-time", "get-load-suffixes", "get-pos-property",
+		"get-process", "get-screen-color", "get-text-property",
+		"get-unicode-property-internal", "get-unused-category",
+		"get-unused-iso-final-char", "getenv-internal", "gethash",
+		"gfile-add-watch", "gfile-rm-watch", "global-key-binding",
+		"gnutls-available-p", "gnutls-boot", "gnutls-bye", "gnutls-deinit",
+		"gnutls-error-fatalp", "gnutls-error-string", "gnutls-errorp",
+		"gnutls-get-initstage", "gnutls-peer-status",
+		"gnutls-peer-status-warning-describe", "goto-char", "gpm-mouse-start",
+		"gpm-mouse-stop", "group-gid", "group-real-gid",
+		"handle-save-session", "handle-switch-frame", "hash-table-count",
+		"hash-table-p", "hash-table-rehash-size",
+		"hash-table-rehash-threshold", "hash-table-size", "hash-table-test",
+		"hash-table-weakness", "iconify-frame", "identity", "image-flush",
+		"image-mask-p", "image-metadata", "image-size", "imagemagick-types",
+		"imagep", "indent-to", "indirect-function", "indirect-variable",
+		"init-image-library", "inotify-add-watch", "inotify-rm-watch",
+		"input-pending-p", "insert", "insert-and-inherit",
+		"insert-before-markers", "insert-before-markers-and-inherit",
+		"insert-buffer-substring", "insert-byte", "insert-char",
+		"insert-file-contents", "insert-startup-screen", "int86",
+		"integer-or-marker-p", "integerp", "interactive-form", "intern",
+		"intern-soft", "internal--track-mouse", "internal-char-font",
+		"internal-complete-buffer", "internal-copy-lisp-face",
+		"internal-default-process-filter",
+		"internal-default-process-sentinel", "internal-describe-syntax-value",
+		"internal-event-symbol-parse-modifiers",
+		"internal-face-x-get-resource", "internal-get-lisp-face-attribute",
+		"internal-lisp-face-attribute-values", "internal-lisp-face-empty-p",
+		"internal-lisp-face-equal-p", "internal-lisp-face-p",
+		"internal-make-lisp-face", "internal-make-var-non-special",
+		"internal-merge-in-global-face",
+		"internal-set-alternative-font-family-alist",
+		"internal-set-alternative-font-registry-alist",
+		"internal-set-font-selection-order",
+		"internal-set-lisp-face-attribute",
+		"internal-set-lisp-face-attribute-from-resource",
+		"internal-show-cursor", "internal-show-cursor-p", "interrupt-process",
+		"invisible-p", "invocation-directory", "invocation-name", "isnan",
+		"iso-charset", "key-binding", "key-description",
+		"keyboard-coding-system", "keymap-parent", "keymap-prompt", "keymapp",
+		"keywordp", "kill-all-local-variables", "kill-buffer", "kill-emacs",
+		"kill-local-variable", "kill-process", "last-nonminibuffer-frame",
+		"lax-plist-get", "lax-plist-put", "ldexp", "length",
+		"libxml-parse-html-region", "libxml-parse-xml-region",
+		"line-beginning-position", "line-end-position", "line-pixel-height",
+		"list", "list-fonts", "list-system-processes", "listp", "load",
+		"load-average", "local-key-binding", "local-variable-if-set-p",
+		"local-variable-p", "locale-info", "locate-file-internal",
+		"lock-buffer", "log", "logand", "logb", "logior", "lognot", "logxor",
+		"looking-at", "lookup-image", "lookup-image-map", "lookup-key",
+		"lower-frame", "lsh", "macroexpand", "make-bool-vector",
+		"make-byte-code", "make-category-set", "make-category-table",
+		"make-char", "make-char-table", "make-directory-internal",
+		"make-frame-invisible", "make-frame-visible", "make-hash-table",
+		"make-indirect-buffer", "make-keymap", "make-list",
+		"make-local-variable", "make-marker", "make-network-process",
+		"make-overlay", "make-serial-process", "make-sparse-keymap",
+		"make-string", "make-symbol", "make-symbolic-link", "make-temp-name",
+		"make-terminal-frame", "make-variable-buffer-local",
+		"make-variable-frame-local", "make-vector", "makunbound",
+		"map-char-table", "map-charset-chars", "map-keymap",
+		"map-keymap-internal", "mapatoms", "mapc", "mapcar", "mapconcat",
+		"maphash", "mark-marker", "marker-buffer", "marker-insertion-type",
+		"marker-position", "markerp", "match-beginning", "match-data",
+		"match-end", "matching-paren", "max", "max-char", "md5", "member",
+		"memory-info", "memory-limit", "memory-use-counts", "memq", "memql",
+		"menu-bar-menu-at-x-y", "menu-or-popup-active-p",
+		"menu-or-popup-active-p", "merge-face-attribute", "message",
+		"message-box", "message-or-box", "min",
+		"minibuffer-completion-contents", "minibuffer-contents",
+		"minibuffer-contents-no-properties", "minibuffer-depth",
+		"minibuffer-prompt", "minibuffer-prompt-end",
+		"minibuffer-selected-window", "minibuffer-window", "minibufferp",
+		"minor-mode-key-binding", "mod", "modify-category-entry",
+		"modify-frame-parameters", "modify-syntax-entry",
+		"mouse-pixel-position", "mouse-position", "move-overlay",
+		"move-point-visually", "move-to-column", "move-to-window-line",
+		"msdos-downcase-filename", "msdos-long-file-names", "msdos-memget",
+		"msdos-memput", "msdos-mouse-disable", "msdos-mouse-enable",
+		"msdos-mouse-init", "msdos-mouse-p", "msdos-remember-default-colors",
+		"msdos-set-keyboard", "msdos-set-mouse-buttons",
+		"multibyte-char-to-unibyte", "multibyte-string-p", "narrow-to-region",
+		"natnump", "nconc", "network-interface-info",
+		"network-interface-list", "new-fontset", "newline-cache-check",
+		"next-char-property-change", "next-frame", "next-overlay-change",
+		"next-property-change", "next-read-file-uses-dialog-p",
+		"next-single-char-property-change", "next-single-property-change",
+		"next-window", "nlistp", "nreverse", "nth", "nthcdr", "null",
+		"number-or-marker-p", "number-to-string", "numberp",
+		"open-dribble-file", "open-font", "open-termscript",
+		"optimize-char-table", "other-buffer", "other-window-for-scrolling",
+		"overlay-buffer", "overlay-end", "overlay-get", "overlay-lists",
+		"overlay-properties", "overlay-put", "overlay-recenter",
+		"overlay-start", "overlayp", "overlays-at", "overlays-in",
+		"parse-partial-sexp", "play-sound-internal", "plist-get",
+		"plist-member", "plist-put", "point", "point-marker", "point-max",
+		"point-max-marker", "point-min", "point-min-marker",
+		"pos-visible-in-window-p", "position-bytes", "posix-looking-at",
+		"posix-search-backward", "posix-search-forward", "posix-string-match",
+		"posn-at-point", "posn-at-x-y", "preceding-char",
+		"prefix-numeric-value", "previous-char-property-change",
+		"previous-frame", "previous-overlay-change",
+		"previous-property-change", "previous-single-char-property-change",
+		"previous-single-property-change", "previous-window", "prin1",
+		"prin1-to-string", "princ", "print", "process-attributes",
+		"process-buffer", "process-coding-system", "process-command",
+		"process-connection", "process-contact", "process-datagram-address",
+		"process-exit-status", "process-filter", "process-filter-multibyte-p",
+		"process-id", "process-inherit-coding-system-flag", "process-list",
+		"process-mark", "process-name", "process-plist",
+		"process-query-on-exit-flag", "process-running-child-p",
+		"process-send-eof", "process-send-region", "process-send-string",
+		"process-sentinel", "process-status", "process-tty-name",
+		"process-type", "processp", "profiler-cpu-log",
+		"profiler-cpu-running-p", "profiler-cpu-start", "profiler-cpu-stop",
+		"profiler-memory-log", "profiler-memory-running-p",
+		"profiler-memory-start", "profiler-memory-stop", "propertize",
+		"purecopy", "put", "put-text-property",
+		"put-unicode-property-internal", "puthash", "query-font",
+		"query-fontset", "quit-process", "raise-frame", "random", "rassoc",
+		"rassq", "re-search-backward", "re-search-forward", "read",
+		"read-buffer", "read-char", "read-char-exclusive",
+		"read-coding-system", "read-command", "read-event",
+		"read-from-minibuffer", "read-from-string", "read-function",
+		"read-key-sequence", "read-key-sequence-vector",
+		"read-no-blanks-input", "read-non-nil-coding-system", "read-string",
+		"read-variable", "recent-auto-save-p", "recent-doskeys",
+		"recent-keys", "recenter", "recursion-depth", "recursive-edit",
+		"redirect-debugging-output", "redirect-frame-focus", "redisplay",
+		"redraw-display", "redraw-frame", "regexp-quote", "region-beginning",
+		"region-end", "register-ccl-program", "register-code-conversion-map",
+		"remhash", "remove-list-of-text-properties", "remove-text-properties",
+		"rename-buffer", "rename-file", "replace-match",
+		"reset-this-command-lengths", "resize-mini-window-internal",
+		"restore-buffer-modified-p", "resume-tty", "reverse", "round",
+		"run-hook-with-args", "run-hook-with-args-until-failure",
+		"run-hook-with-args-until-success", "run-hook-wrapped", "run-hooks",
+		"run-window-configuration-change-hook", "run-window-scroll-functions",
+		"safe-length", "scan-lists", "scan-sexps", "scroll-down",
+		"scroll-left", "scroll-other-window", "scroll-right", "scroll-up",
+		"search-backward", "search-forward", "secure-hash", "select-frame",
+		"select-window", "selected-frame", "selected-window",
+		"self-insert-command", "send-string-to-terminal", "sequencep",
+		"serial-process-configure", "set", "set-buffer",
+		"set-buffer-auto-saved", "set-buffer-major-mode",
+		"set-buffer-modified-p", "set-buffer-multibyte", "set-case-table",
+		"set-category-table", "set-char-table-extra-slot",
+		"set-char-table-parent", "set-char-table-range", "set-charset-plist",
+		"set-charset-priority", "set-coding-system-priority",
+		"set-cursor-size", "set-default", "set-default-file-modes",
+		"set-default-toplevel-value", "set-file-acl", "set-file-modes",
+		"set-file-selinux-context", "set-file-times", "set-fontset-font",
+		"set-frame-height", "set-frame-position", "set-frame-selected-window",
+		"set-frame-size", "set-frame-width", "set-fringe-bitmap-face",
+		"set-input-interrupt-mode", "set-input-meta-mode", "set-input-mode",
+		"set-keyboard-coding-system-internal", "set-keymap-parent",
+		"set-marker", "set-marker-insertion-type", "set-match-data",
+		"set-message-beep", "set-minibuffer-window",
+		"set-mouse-pixel-position", "set-mouse-position",
+		"set-network-process-option", "set-output-flow-control",
+		"set-process-buffer", "set-process-coding-system",
+		"set-process-datagram-address", "set-process-filter",
+		"set-process-filter-multibyte",
+		"set-process-inherit-coding-system-flag", "set-process-plist",
+		"set-process-query-on-exit-flag", "set-process-sentinel",
+		"set-process-window-size", "set-quit-char",
+		"set-safe-terminal-coding-system-internal", "set-screen-color",
+		"set-standard-case-table", "set-syntax-table",
+		"set-terminal-coding-system-internal", "set-terminal-local-value",
+		"set-terminal-parameter", "set-text-properties", "set-time-zone-rule",
+		"set-visited-file-modtime", "set-window-buffer",
+		"set-window-combination-limit", "set-window-configuration",
+		"set-window-dedicated-p", "set-window-display-table",
+		"set-window-fringes", "set-window-hscroll", "set-window-margins",
+		"set-window-new-normal", "set-window-new-pixel",
+		"set-window-new-total", "set-window-next-buffers",
+		"set-window-parameter", "set-window-point", "set-window-prev-buffers",
+		"set-window-redisplay-end-trigger", "set-window-scroll-bars",
+		"set-window-start", "set-window-vscroll", "setcar", "setcdr",
+		"setplist", "show-face-resources", "signal", "signal-process", "sin",
+		"single-key-description", "skip-chars-backward", "skip-chars-forward",
+		"skip-syntax-backward", "skip-syntax-forward", "sleep-for", "sort",
+		"sort-charsets", "special-variable-p", "split-char",
+		"split-window-internal", "sqrt", "standard-case-table",
+		"standard-category-table", "standard-syntax-table", "start-kbd-macro",
+		"start-process", "stop-process", "store-kbd-macro-event", "string",
+		"string-as-multibyte", "string-as-unibyte", "string-bytes",
+		"string-collate-equalp", "string-collate-lessp", "string-equal",
+		"string-lessp", "string-make-multibyte", "string-make-unibyte",
+		"string-match", "string-to-char", "string-to-multibyte",
+		"string-to-number", "string-to-syntax", "string-to-unibyte",
+		"string-width", "stringp", "subr-name", "subrp",
+		"subst-char-in-region", "substitute-command-keys",
+		"substitute-in-file-name", "substring", "substring-no-properties",
+		"suspend-emacs", "suspend-tty", "suspicious-object", "sxhash",
+		"symbol-function", "symbol-name", "symbol-plist", "symbol-value",
+		"symbolp", "syntax-table", "syntax-table-p", "system-groups",
+		"system-move-file-to-trash", "system-name", "system-users", "tan",
+		"terminal-coding-system", "terminal-list", "terminal-live-p",
+		"terminal-local-value", "terminal-name", "terminal-parameter",
+		"terminal-parameters", "terpri", "test-completion",
+		"text-char-description", "text-properties-at", "text-property-any",
+		"text-property-not-all", "this-command-keys",
+		"this-command-keys-vector", "this-single-command-keys",
+		"this-single-command-raw-keys", "time-add", "time-less-p",
+		"time-subtract", "tool-bar-get-system-style", "tool-bar-height",
+		"tool-bar-pixel-width", "top-level", "trace-redisplay",
+		"trace-to-stderr", "translate-region-internal", "transpose-regions",
+		"truncate", "try-completion", "tty-display-color-cells",
+		"tty-display-color-p", "tty-no-underline",
+		"tty-suppress-bold-inverse-default-colors", "tty-top-frame",
+		"tty-type", "type-of", "undo-boundary", "unencodable-char-position",
+		"unhandled-file-name-directory", "unibyte-char-to-multibyte",
+		"unibyte-string", "unicode-property-table-internal", "unify-charset",
+		"unintern", "unix-sync", "unlock-buffer", "upcase", "upcase-initials",
+		"upcase-initials-region", "upcase-region", "upcase-word",
+		"use-global-map", "use-local-map", "user-full-name",
+		"user-login-name", "user-real-login-name", "user-real-uid",
+		"user-uid", "variable-binding-locus", "vconcat", "vector",
+		"vector-or-char-table-p", "vectorp", "verify-visited-file-modtime",
+		"vertical-motion", "visible-frame-list", "visited-file-modtime",
+		"w16-get-clipboard-data", "w16-selection-exists-p",
+		"w16-set-clipboard-data", "w32-battery-status",
+		"w32-default-color-map", "w32-define-rgb-color",
+		"w32-display-monitor-attributes-list", "w32-frame-menu-bar-size",
+		"w32-frame-rect", "w32-get-clipboard-data",
+		"w32-get-codepage-charset", "w32-get-console-codepage",
+		"w32-get-console-output-codepage", "w32-get-current-locale-id",
+		"w32-get-default-locale-id", "w32-get-keyboard-layout",
+		"w32-get-locale-info", "w32-get-valid-codepages",
+		"w32-get-valid-keyboard-layouts", "w32-get-valid-locale-ids",
+		"w32-has-winsock", "w32-long-file-name", "w32-reconstruct-hot-key",
+		"w32-register-hot-key", "w32-registered-hot-keys",
+		"w32-selection-exists-p", "w32-send-sys-command",
+		"w32-set-clipboard-data", "w32-set-console-codepage",
+		"w32-set-console-output-codepage", "w32-set-current-locale",
+		"w32-set-keyboard-layout", "w32-set-process-priority",
+		"w32-shell-execute", "w32-short-file-name", "w32-toggle-lock-key",
+		"w32-unload-winsock", "w32-unregister-hot-key", "w32-window-exists-p",
+		"w32notify-add-watch", "w32notify-rm-watch",
+		"waiting-for-user-input-p", "where-is-internal", "widen",
+		"widget-apply", "widget-get", "widget-put",
+		"window-absolute-pixel-edges", "window-at", "window-body-height",
+		"window-body-width", "window-bottom-divider-width", "window-buffer",
+		"window-combination-limit", "window-configuration-frame",
+		"window-configuration-p", "window-dedicated-p",
+		"window-display-table", "window-edges", "window-end", "window-frame",
+		"window-fringes", "window-header-line-height", "window-hscroll",
+		"window-inside-absolute-pixel-edges", "window-inside-edges",
+		"window-inside-pixel-edges", "window-left-child",
+		"window-left-column", "window-line-height", "window-list",
+		"window-list-1", "window-live-p", "window-margins",
+		"window-minibuffer-p", "window-mode-line-height", "window-new-normal",
+		"window-new-pixel", "window-new-total", "window-next-buffers",
+		"window-next-sibling", "window-normal-size", "window-old-point",
+		"window-parameter", "window-parameters", "window-parent",
+		"window-pixel-edges", "window-pixel-height", "window-pixel-left",
+		"window-pixel-top", "window-pixel-width", "window-point",
+		"window-prev-buffers", "window-prev-sibling",
+		"window-redisplay-end-trigger", "window-resize-apply",
+		"window-resize-apply-total", "window-right-divider-width",
+		"window-scroll-bar-height", "window-scroll-bar-width",
+		"window-scroll-bars", "window-start", "window-system",
+		"window-text-height", "window-text-pixel-size", "window-text-width",
+		"window-top-child", "window-top-line", "window-total-height",
+		"window-total-width", "window-use-time", "window-valid-p",
+		"window-vscroll", "windowp", "write-char", "write-region",
+		"x-backspace-delete-keys-p", "x-change-window-property",
+		"x-change-window-property", "x-close-connection",
+		"x-close-connection", "x-create-frame", "x-create-frame",
+		"x-delete-window-property", "x-delete-window-property",
+		"x-disown-selection-internal", "x-display-backing-store",
+		"x-display-backing-store", "x-display-color-cells",
+		"x-display-color-cells", "x-display-grayscale-p",
+		"x-display-grayscale-p", "x-display-list", "x-display-list",
+		"x-display-mm-height", "x-display-mm-height", "x-display-mm-width",
+		"x-display-mm-width", "x-display-monitor-attributes-list",
+		"x-display-pixel-height", "x-display-pixel-height",
+		"x-display-pixel-width", "x-display-pixel-width", "x-display-planes",
+		"x-display-planes", "x-display-save-under", "x-display-save-under",
+		"x-display-screens", "x-display-screens", "x-display-visual-class",
+		"x-display-visual-class", "x-family-fonts", "x-file-dialog",
+		"x-file-dialog", "x-file-dialog", "x-focus-frame", "x-frame-geometry",
+		"x-frame-geometry", "x-get-atom-name", "x-get-resource",
+		"x-get-selection-internal", "x-hide-tip", "x-hide-tip",
+		"x-list-fonts", "x-load-color-file", "x-menu-bar-open-internal",
+		"x-menu-bar-open-internal", "x-open-connection", "x-open-connection",
+		"x-own-selection-internal", "x-parse-geometry", "x-popup-dialog",
+		"x-popup-menu", "x-register-dnd-atom", "x-select-font",
+		"x-select-font", "x-selection-exists-p", "x-selection-owner-p",
+		"x-send-client-message", "x-server-max-request-size",
+		"x-server-max-request-size", "x-server-vendor", "x-server-vendor",
+		"x-server-version", "x-server-version", "x-show-tip", "x-show-tip",
+		"x-synchronize", "x-synchronize", "x-uses-old-gtk-dialog",
+		"x-window-property", "x-window-property", "x-wm-set-size-hint",
+		"xw-color-defined-p", "xw-color-defined-p", "xw-color-values",
+		"xw-color-values", "xw-display-color-p", "xw-display-color-p",
+		"yes-or-no-p", "zlib-available-p", "zlib-decompress-region",
+		"forward-point",
+	}
+
+	// emacsBuiltinFunctionHighlighted lists additional built-in names
+	// that are highlighted as NameBuiltin (merged with emacsMacros in
+	// the EmacsLisp lexer below) rather than as plain functions.
+	emacsBuiltinFunctionHighlighted = []string{
+		"defvaralias", "provide", "require",
+		"with-no-warnings", "define-widget", "with-electric-help",
+		"throw", "defalias", "featurep",
+	}
+
+	// emacsLambdaListKeywords lists lambda-list keywords (the "&..."
+	// markers in argument lists); remapped to KeywordPseudo below.
+	emacsLambdaListKeywords = []string{
+		"&allow-other-keys", "&aux", "&body", "&environment", "&key", "&optional",
+		"&rest", "&whole",
+	}
+
+	// emacsErrorKeywords lists error-signalling forms; remapped to
+	// NameException below.
+	emacsErrorKeywords = []string{
+		"cl-assert", "cl-check-type", "error", "signal",
+		"user-error", "warn",
+	}
+)
+
+// EmacsLisp lexer.
+//
+// The base lexer is loaded from the embedded XML definition; the
+// TypeMapping then remaps NameVariable tokens whose text appears in the
+// word lists above to a more specific token type. Macros and the extra
+// highlighted built-ins share the NameBuiltin target, so they are
+// merged into a single mapping entry via append.
+// NOTE(review): where a word appears in more than one list, the winner
+// presumably depends on TypeRemappingLexer's handling of entry order —
+// confirm before reordering these entries.
+var EmacsLisp = Register(TypeRemappingLexer(MustNewXMLLexer(
+	embedded,
+	"embedded/emacslisp.xml",
+), TypeMapping{
+	{NameVariable, NameFunction, emacsBuiltinFunction},
+	{NameVariable, NameBuiltin, emacsSpecialForms},
+	{NameVariable, NameException, emacsErrorKeywords},
+	{NameVariable, NameBuiltin, append(emacsBuiltinFunctionHighlighted, emacsMacros...)},
+	{NameVariable, KeywordPseudo, emacsLambdaListKeywords},
+}))
@@ -0,0 +1,154 @@
+<lexer>
+ <config>
+ <name>ABAP</name>
+ <alias>abap</alias>
+ <filename>*.abap</filename>
+ <filename>*.ABAP</filename>
+ <mime_type>text/x-abap</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="common">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="^\*.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\".*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="##\w+">
+ <token type="CommentSpecial"/>
+ </rule>
+ </state>
+ <state name="variable-names">
+ <rule pattern="<\S+>">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\w[\w~]*(?:(\[\])|->\*)?">
+ <token type="NameVariable"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="common"/>
+ </rule>
+ <rule pattern="CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION)">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|TRANSACTION|TRANSFORMATION))\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(FORM|PERFORM)(\s+)(\w+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(PERFORM)(\s+)(\()(\w+)(\))">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="NameVariable"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(METHOD)(\s+)([\w~]+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s+)([\w\-]+)([=\-]>)([\w\-~]+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ <token type="Operator"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?<=(=|-)>)([\w\-~]+)(?=\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(TEXT)(-)(\d{3})">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Punctuation"/>
+ <token type="LiteralNumberInteger"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(TEXT)(-)(\w{3})">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Punctuation"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(ADD-CORRESPONDING|AUTHORITY-CHECK|CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|DELETE-ADJACENT|DIVIDE-CORRESPONDING|EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|INTERFACE-POOL|INVERTED-DATE|LOAD-OF-PROGRAM|LOG-POINT|MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|OUTPUT-LENGTH|PRINT-CONTROL|SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|SYNTAX-CHECK|SYSTEM-EXCEPTIONS|TYPE-POOL|TYPE-POOLS|NO-DISPLAY)\b">
+ <token type="Keyword"/>
+ </rule>
@@ -0,0 +1,66 @@
+<lexer>
+ <config>
+ <name>ABNF</name>
+ <alias>abnf</alias>
+ <filename>*.abnf</filename>
+ <mime_type>text/x-abnf</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern=";.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(%[si])?"[^"]*"">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="%b[01]+\-[01]+\b">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="%b[01]+(\.[01]+)*\b">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="%d[0-9]+\-[0-9]+\b">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="%d[0-9]+(\.[0-9]+)*\b">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="%x[0-9a-fA-F]+\-[0-9a-fA-F]+\b">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="%x[0-9a-fA-F]+(\.[0-9a-fA-F]+)*\b">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="\b[0-9]+\*[0-9]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b[0-9]+\*">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b[0-9]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\*">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(HEXDIG|DQUOTE|DIGIT|VCHAR|OCTET|ALPHA|CHAR|CRLF|HTAB|LWSP|BIT|CTL|WSP|LF|SP|CR)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="[a-zA-Z][a-zA-Z0-9-]+\b">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(=/|=|/)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[\[\]()]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,68 @@
+<lexer>
+ <config>
+ <name>ActionScript</name>
+ <alias>as</alias>
+ <alias>actionscript</alias>
+ <filename>*.as</filename>
+ <mime_type>application/x-actionscript</mime_type>
+ <mime_type>text/x-actionscript</mime_type>
+ <mime_type>text/actionscript</mime_type>
+ <dot_all>true</dot_all>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\\|\\/|[^/\n])*/[gim]*">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[~^*!%&<>|+=:;,/?\\-]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[{}\[\]();.]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(instanceof|arguments|continue|default|typeof|switch|return|catch|break|while|throw|each|this|with|else|case|var|new|for|try|if|do|in)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(implements|protected|namespace|interface|intrinsic|override|function|internal|private|package|extends|dynamic|import|native|return|public|static|class|const|super|final|get|set)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b">
+ <token type="KeywordConstant"/>
+ </rule>
@@ -0,0 +1,163 @@
+<lexer>
+ <config>
+ <name>ActionScript 3</name>
+ <alias>as3</alias>
+ <alias>actionscript3</alias>
+ <filename>*.as</filename>
+ <mime_type>application/x-actionscript3</mime_type>
+ <mime_type>text/x-actionscript3</mime_type>
+ <mime_type>text/actionscript3</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="funcparams">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(\s*)(\.\.\.)?([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)(\s*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Name"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="defval"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Operator"/>
+ <push state="type"/>
+ </rule>
+ </state>
+ <state name="type">
+ <rule pattern="(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?|\*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ </bygroups>
+ <pop depth="2"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ <pop depth="2"/>
+ </rule>
+ <rule>
+ <pop depth="2"/>
+ </rule>
+ </state>
+ <state name="defval">
+ <rule pattern="(=)(\s*)([^(),]+)(\s*)(,?)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <usingself state="root"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Operator"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(function\s+)([$a-zA-Z_]\w*)(\s*)(\()">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ <push state="funcparams"/>
+ </rule>
+ <rule pattern="(var|const)(\s+)([$a-zA-Z_]\w*)(\s*)(:)(\s*)([$a-zA-Z_]\w*(?:\.<\w+>)?)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ <token type="Name"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(import|package)(\s+)((?:[$a-zA-Z_]\w*|\.)+)(\s*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(new)(\s+)([$a-zA-Z_]\w*(?:\.<\w+>)?)(\s*)(\()">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\\|\\/|[^\n])*/[gisx]*">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="(\.)([$a-zA-Z_]\w*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameAttribute"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(case|default|for|each|in|while|do|break|return|continue|if|else|throw|try|catch|with|new|typeof|arguments|instanceof|this|switch|import|include|as|is)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(class|public|final|internal|native|override|private|protected|static|import|extends|implements|interface|intrinsic|return|super|dynamic|function|const|get|namespace|package|set)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|unescape)\b">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="[$a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="[~^*!%&<>|+=:;,/?\\{}\[\]().-]+">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,321 @@
+<lexer>
+ <config>
+ <name>Ada</name>
+ <alias>ada</alias>
+ <alias>ada95</alias>
+ <alias>ada2005</alias>
+ <filename>*.adb</filename>
+ <filename>*.ads</filename>
+ <filename>*.ada</filename>
+ <mime_type>text/x-ada</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="end">
+ <rule pattern="(if|case|record|loop|select)">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern=""[^"]+"|[\w.]+">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="array_def">
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(\w+)(\s+)(range)">
+ <bygroups>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ <token type="KeywordReserved"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="package_instantiation">
+ <rule pattern="("[^"]+"|\w+)(\s+)(=>)">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[\w.\'"]">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="subprogram">
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="#pop" state="formal_part"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="is\b">
+ <token type="KeywordReserved"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=""[^"]+"|\w+">
+ <token type="NameFunction"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="type_def">
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="formal_part"/>
+ </rule>
+ <rule pattern="with|and|use">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="array\b">
+ <token type="KeywordReserved"/>
+ <push state="#pop" state="array_def"/>
+ </rule>
+ <rule pattern="record\b">
+ <token type="KeywordReserved"/>
+ <push state="record_def"/>
+ </rule>
+ <rule pattern="(null record)(;)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="[\w.]+">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="formal_part">
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern=",|:[^=]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(in|not|null|out|access)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="package">
+ <rule pattern="body">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="is\s+new|renames">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="is">
+ <token type="KeywordReserved"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="package_instantiation"/>
+ </rule>
+ <rule pattern="([\w.]+)">
+ <token type="NameClass"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="attribute">
+ <rule pattern="(')(\w+)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameAttribute"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="record_def">
+ <rule pattern="end record">
+ <token type="KeywordReserved"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="--.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="function|procedure|entry">
+ <token type="KeywordDeclaration"/>
+ <push state="subprogram"/>
+ </rule>
+ <rule pattern="(subtype|type)(\s+)(\w+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ </bygroups>
+ <push state="type_def"/>
+ </rule>
+ <rule pattern="task|protected">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(subtype)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(end)(\s+)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="end"/>
+ </rule>
+ <rule pattern="(pragma)(\s+)(\w+)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(Short_Short_Integer|Short_Short_Float|Long_Long_Integer|Long_Long_Float|Wide_Character|Reference_Type|Short_Integer|Long_Integer|Wide_String|Short_Float|Controlled|Long_Float|Character|Generator|File_Type|File_Mode|Positive|Duration|Boolean|Natural|Integer|Address|Cursor|String|Count|Float|Byte)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="generic|private">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="package">
+ <token type="KeywordDeclaration"/>
+ <push state="package"/>
+ </rule>
+ <rule pattern="array\b">
+ <token type="KeywordReserved"/>
+ <push state="array_def"/>
+ </rule>
+ <rule pattern="(with|use)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern="(\w+)(\s*)(:)(\s*)(constant)">
+ <bygroups>
+ <token type="NameConstant"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="KeywordReserved"/>
+ </bygroups>
+ </rule>
+ <rule pattern="<<\w+>>">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="(\w+)(\s*)(:)(\s*)(declare|begin|loop|for|while)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="KeywordReserved"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(synchronized|overriding|terminate|interface|exception|protected|separate|constant|abstract|renames|reverse|subtype|aliased|declare|requeue|limited|return|tagged|access|record|select|accept|digits|others|pragma|entry|elsif|delta|delay|array|until|range|raise|while|begin|abort|else|loop|when|type|null|then|body|task|goto|case|exit|end|for|abs|xor|all|new|out|is|of|if|or|do|at)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern=""[^"]*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="attribute"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ <rule pattern="'[^']'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="(\w+)(\s*|[(,])">
+ <bygroups>
+ <token type="Name"/>
+ <usingself state="root"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<>|=>|:=|[()|:;,.'])">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[*<>+=/&-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\n+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="[0-9_]+#[0-9a-f]+#">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9_]+\.[0-9_]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9_]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,66 @@
+<lexer>
+ <config>
+ <name>Agda</name>
+ <alias>agda</alias>
+ <filename>*.agda</filename>
+ <mime_type>text/x-agda</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^(\s*)([^\s(){}]+)(\s*)(:)(\s*)"><bygroups><token type="TextWhitespace"/><token type="NameFunction"/><token type="TextWhitespace"/><token type="OperatorWord"/><token type="TextWhitespace"/></bygroups></rule>
+ <rule pattern="--(?![!#$%&*+./<=>?@^|_~:\\]).*?$"><token type="CommentSingle"/></rule>
+ <rule pattern="\{-"><token type="CommentMultiline"/><push state="comment"/></rule>
+ <rule pattern="\{!"><token type="CommentMultiline"/><push state="hole"/></rule>
+ <rule pattern="\b(abstract|codata|coinductive|constructor|data|do|eta-equality|field|forall|hiding|in|inductive|infix|infixl|infixr|instance|interleaved|let|macro|mutual|no-eta-equality|open|overlap|pattern|postulate|primitive|private|quote|quoteTerm|record|renaming|rewrite|syntax|tactic|unquote|unquoteDecl|unquoteDef|using|variable|where|with)(?!\')\b"><token type="KeywordReserved"/></rule>
+ <rule pattern="(import|module)(\s+)"><bygroups><token type="KeywordReserved"/><token type="TextWhitespace"/></bygroups><push state="module"/></rule>
+ <rule pattern="\b(Set|Prop)[\u2080-\u2089]*\b"><token type="KeywordType"/></rule>
+ <rule pattern="(\(|\)|\{|\})"><token type="Operator"/></rule>
+ <rule pattern="(\.{1,3}|\||\u03BB|\u2200|\u2192|:|=|->)"><token type="OperatorWord"/></rule>
+ <rule pattern="\d+[eE][+-]?\d+"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="\d+\.\d+([eE][+-]?\d+)?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="0[xX][\da-fA-F]+"><token type="LiteralNumberHex"/></rule>
+ <rule pattern="\d+"><token type="LiteralNumberInteger"/></rule>
+ <rule pattern="'"><token type="LiteralStringChar"/><push state="character"/></rule>
+ <rule pattern="""><token type="LiteralString"/><push state="string"/></rule>
+ <rule pattern="[^\s(){}]+"><token type="Text"/></rule>
+ <rule pattern="\s+?"><token type="TextWhitespace"/></rule>
+ </state>
+ <state name="hole">
+ <rule pattern="[^!{}]+"><token type="CommentMultiline"/></rule>
+ <rule pattern="\{!"><token type="CommentMultiline"/><push/></rule>
+ <rule pattern="!\}"><token type="CommentMultiline"/><pop depth="1"/></rule>
+ <rule pattern="[!{}]"><token type="CommentMultiline"/></rule>
+ </state>
+ <state name="module">
+ <rule pattern="\{-"><token type="CommentMultiline"/><push state="comment"/></rule>
+ <rule pattern="[a-zA-Z][\w.\']*"><token type="Name"/><pop depth="1"/></rule>
+ <rule pattern="[\W0-9_]+"><token type="Text"/></rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^-{}]+"><token type="CommentMultiline"/></rule>
+ <rule pattern="\{-"><token type="CommentMultiline"/><push/></rule>
+ <rule pattern="-\}"><token type="CommentMultiline"/><pop depth="1"/></rule>
+ <rule pattern="[-{}]"><token type="CommentMultiline"/></rule>
+ </state>
+ <state name="character">
+ <rule pattern="[^\\']'"><token type="LiteralStringChar"/><pop depth="1"/></rule>
+ <rule pattern="\\"><token type="LiteralStringEscape"/><push state="escape"/></rule>
+ <rule pattern="'"><token type="LiteralStringChar"/><pop depth="1"/></rule>
+ </state>
+ <state name="string">
+ <rule pattern="[^\\"]+"><token type="LiteralString"/></rule>
+ <rule pattern="\\"><token type="LiteralStringEscape"/><push state="escape"/></rule>
+ <rule pattern="""><token type="LiteralString"/><pop depth="1"/></rule>
+ </state>
+ <state name="escape">
+ <rule pattern="[abfnrtv"\'&\\]"><token type="LiteralStringEscape"/><pop depth="1"/></rule>
@@ -0,0 +1,75 @@
+<lexer>
+ <config>
+ <name>AL</name>
+ <alias>al</alias>
+ <filename>*.al</filename>
+ <filename>*.dal</filename>
+ <mime_type>text/x-al</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="(?s)\/\*.*?\\*\*\/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="(?s)//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\"([^\"])*\"">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="'([^'])*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\b(?i:(ARRAY|ASSERTERROR|BEGIN|BREAK|CASE|DO|DOWNTO|ELSE|END|EVENT|EXIT|FOR|FOREACH|FUNCTION|IF|IMPLEMENTS|IN|INDATASET|INTERFACE|INTERNAL|LOCAL|OF|PROCEDURE|PROGRAM|PROTECTED|REPEAT|RUNONCLIENT|SECURITYFILTERING|SUPPRESSDISPOSE|TEMPORARY|THEN|TO|TRIGGER|UNTIL|VAR|WHILE|WITH|WITHEVENTS))\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(?i:(AND|DIV|MOD|NOT|OR|XOR))\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="\b(?i:(AVERAGE|CONST|COUNT|EXIST|FIELD|FILTER|LOOKUP|MAX|MIN|ORDER|SORTING|SUM|TABLEDATA|UPPERLIMIT|WHERE|ASCENDING|DESCENDING))\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(?i:(CODEUNIT|PAGE|PAGEEXTENSION|PAGECUSTOMIZATION|DOTNET|ENUM|ENUMEXTENSION|VALUE|QUERY|REPORT|TABLE|TABLEEXTENSION|XMLPORT|PROFILE|CONTROLADDIN|REPORTEXTENSION|INTERFACE|PERMISSIONSET|PERMISSIONSETEXTENSION|ENTITLEMENT))\b">
+ <token type="Keyword"/>
+ </rule>
@@ -0,0 +1,58 @@
+
+<lexer>
+ <config>
+ <name>Alloy</name>
+ <alias>alloy</alias>
+ <filename>*.als</filename>
+ <mime_type>text/x-alloy</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="sig">
+ <rule pattern="(extends)\b"><token type="Keyword"/><pop depth="1"/></rule>
+ <rule pattern="[a-zA-Z_][\w]*"*"><token type="Name"/></rule>
+ <rule pattern="[^\S\n]+"><token type="TextWhitespace"/></rule>
+ <rule pattern=","><token type="Punctuation"/></rule>
+ <rule pattern="\{"><token type="Operator"/><pop depth="1"/></rule>
+ </state>
+ <state name="module">
+ <rule pattern="[^\S\n]+"><token type="TextWhitespace"/></rule>
+ <rule pattern="[a-zA-Z_][\w]*"*"><token type="Name"/><pop depth="1"/></rule>
+ </state>
+ <state name="fun">
+ <rule pattern="[^\S\n]+"><token type="TextWhitespace"/></rule>
+ <rule pattern="\{"><token type="Operator"/><pop depth="1"/></rule>
+ <rule pattern="[a-zA-Z_][\w]*"*"><token type="Name"/><pop depth="1"/></rule>
+ </state>
+ <state name="fact">
+ <rule><include state="fun"/></rule>
+ <rule pattern=""\b(\\\\|\\[^\\]|[^"\\])*""><token type="LiteralString"/><pop depth="1"/></rule>
+ </state>
+ <state name="root">
+ <rule pattern="--.*?$"><token type="CommentSingle"/></rule>
+ <rule pattern="//.*?$"><token type="CommentSingle"/></rule>
+ <rule pattern="/\*.*?\*/"><token type="CommentMultiline"/></rule>
+ <rule pattern="[^\S\n]+"><token type="TextWhitespace"/></rule>
+ <rule pattern="(module|open)(\s+)"><bygroups><token type="KeywordNamespace"/><token type="TextWhitespace"/></bygroups><push state="module"/></rule>
+ <rule pattern="(sig|enum)(\s+)"><bygroups><token type="KeywordDeclaration"/><token type="TextWhitespace"/></bygroups><push state="sig"/></rule>
+ <rule pattern="(iden|univ|none)\b"><token type="KeywordConstant"/></rule>
+ <rule pattern="(int|Int)\b"><token type="KeywordType"/></rule>
+ <rule pattern="(var|this|abstract|extends|set|seq|one|lone|let)\b"><token type="Keyword"/></rule>
+ <rule pattern="(all|some|no|sum|disj|when|else)\b"><token type="Keyword"/></rule>
+ <rule pattern="(run|check|for|but|exactly|expect|as|steps)\b"><token type="Keyword"/></rule>
+ <rule pattern="(always|after|eventually|until|release)\b"><token type="Keyword"/></rule>
+ <rule pattern="(historically|before|once|since|triggered)\b"><token type="Keyword"/></rule>
+ <rule pattern="(and|or|implies|iff|in)\b"><token type="OperatorWord"/></rule>
+ <rule pattern="(fun|pred|assert)(\s+)"><bygroups><token type="Keyword"/><token type="TextWhitespace"/></bygroups><push state="fun"/></rule>
+ <rule pattern="(fact)(\s+)"><bygroups><token type="Keyword"/><token type="TextWhitespace"/></bygroups><push state="fact"/></rule>
+ <rule pattern="!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.\.|\.|->"><token type="Operator"/></rule>
+ <rule pattern="[-+/*%=<>&!^|~{}\[\]().\';]"><token type="Operator"/></rule>
+ <rule pattern="[a-zA-Z_][\w]*"*"><token type="Name"/></rule>
+ <rule pattern="[:,]"><token type="Punctuation"/></rule>
+ <rule pattern="[0-9]+"><token type="LiteralNumberInteger"/></rule>
+ <rule pattern=""\b(\\\\|\\[^\\]|[^"\\])*""><token type="LiteralString"/></rule>
+ <rule pattern="\n"><token type="TextWhitespace"/></rule>
+ </state>
+ </rules>
+</lexer>
+
@@ -0,0 +1,108 @@
+<lexer>
+ <config>
+ <name>Angular2</name>
+ <alias>ng2</alias>
+ </config>
+ <rules>
+ <state name="attr">
+ <rule pattern="".*?"">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="'.*?'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\s>]+">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[^{([*#]+">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="(\{\{)(\s*)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="ngExpression"/>
+ </rule>
+ <rule pattern="([([]+)([\w:.-]+)([\])]+)(\s*)(=)(\s*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameAttribute"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="attr"/>
+ </rule>
+ <rule pattern="([([]+)([\w:.-]+)([\])]+)(\s*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameAttribute"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([*#])([\w:.-]+)(\s*)(=)(\s*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameAttribute"/>
+ <token type="Punctuation"/>
+ <token type="Operator"/>
+ </bygroups>
+ <push state="attr"/>
+ </rule>
+ <rule pattern="([*#])([\w:.-]+)(\s*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameAttribute"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="ngExpression">
+ <rule pattern="\s+(\|\s+)?">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\}\}">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=":?(true|false)">
+ <token type="LiteralStringBoolean"/>
+ </rule>
+ <rule pattern=":?"(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=":?'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[a-zA-Z][\w-]*(\(.*\))?">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\.[\w-]+(\(.*\))?">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(\?)(\s*)([^}\s]+)(\s*)(:)(\s*)([^}\s]+)(\s*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,317 @@
+<lexer>
+ <config>
+ <name>ANTLR</name>
+ <alias>antlr</alias>
+ </config>
+ <rules>
+ <state name="nested-arg-action">
+ <rule pattern="([^$\[\]\'"/]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|/)+">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(\$[a-zA-Z]+)(\.?)(text|value)?">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Punctuation"/>
+ <token type="NameProperty"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\\\\|\\\]|\\\[|[^\[\]])+">
+ <token type="Other"/>
+ </rule>
+ </state>
+ <state name="exception">
+ <rule pattern="\n">
+ <token type="TextWhitespace"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="Punctuation"/>
+ <push state="nested-arg-action"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="action"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule pattern="(lexer|parser|tree)?(\s*)(grammar\b)(\s*)([A-Za-z]\w*)(;)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="NameClass"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="options\b">
+ <token type="Keyword"/>
+ <push state="options"/>
+ </rule>
+ <rule pattern="tokens\b">
+ <token type="Keyword"/>
+ <push state="tokens"/>
+ </rule>
+ <rule pattern="(scope)(\s*)([A-Za-z]\w*)(\s*)(\{)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="NameVariable"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="action"/>
+ </rule>
+ <rule pattern="(catch|finally)\b">
+ <token type="Keyword"/>
+ <push state="exception"/>
+ </rule>
+ <rule pattern="(@[A-Za-z]\w*)(\s*)(::)?(\s*)([A-Za-z]\w*)(\s*)(\{)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="action"/>
+ </rule>
+ <rule pattern="((?:protected|private|public|fragment)\b)?(\s*)([A-Za-z]\w*)(!)?">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="rule-alts" state="rule-prelims"/>
+ </rule>
+ </state>
+ <state name="tokens">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="([A-Z]\w*)(\s*)(=)?(\s*)(\'(?:\\\\|\\\'|[^\']*)\')?(\s*)(;)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ <token type="LiteralString"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="options">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="([A-Za-z]\w*)(\s*)(=)(\s*)([A-Za-z]\w*|\'(?:\\\\|\\\'|[^\']*)\'|[0-9]+|\*)(\s*)(;)">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ <token type="Text"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="rule-alts">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule pattern="options\b">
+ <token type="Keyword"/>
+ <push state="options"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="<<([^>]|>[^>])>>">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\$?[A-Z_]\w*">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="\$?[a-z_]\w*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="Punctuation"/>
+ <push state="nested-arg-action"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="action"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="rule-prelims">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule pattern="returns\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="Punctuation"/>
+ <push state="nested-arg-action"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="action"/>
+ </rule>
+ <rule pattern="(throws)(\s+)([A-Za-z]\w*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(,)(\s*)([A-Za-z]\w*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ </rule>
+ <rule pattern="options\b">
+ <token type="Keyword"/>
+ <push state="options"/>
+ </rule>
+ <rule pattern="(scope)(\s+)(\{)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="action"/>
+ </rule>
+ <rule pattern="(scope)(\s+)([A-Za-z]\w*)(\s*)(;)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(@[A-Za-z]\w*)(\s*)(\{)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="action"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="action">
+ <rule pattern="([^${}\'"/\\]+|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|/(?!\*)(\\\\|\\/|[^/])*/|\\(?!%)|/)+">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="(\\)(%)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Other"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\$[a-zA-Z]+)(\.?)(text|value)?">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Punctuation"/>
+ <token type="NameProperty"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="comments">
+ <rule pattern="//.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="/\*(.|\n)*?\*/">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,74 @@
+<lexer>
+ <config>
+ <name>ApacheConf</name>
+ <alias>apacheconf</alias>
+ <alias>aconf</alias>
+ <alias>apache</alias>
+ <filename>.htaccess</filename>
+ <filename>apache.conf</filename>
+ <filename>apache2.conf</filename>
+ <mime_type>text/x-apacheconf</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(#.*?)$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(<[^\s>]+)(?:(\s+)(.*?))?(>)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([a-z]\w*)(\s+)">
+ <bygroups>
+ <token type="NameBuiltin"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="value"/>
+ </rule>
+ <rule pattern="\.+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="value">
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="$">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\d+\.\d+\.\d+\.\d+(?:/\d+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="/([a-z0-9][\w./-]+)">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="(on|off|none|any|all|double|email|dns|min|minimal|os|productonly|full|emerg|alert|crit|error|warn|notice|info|debug|registry|script|inetd|standalone|user|group)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern=""([^"\\]*(?:\\.[^"\\]*)*)"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[^\s"\\]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,59 @@
+<lexer>
+ <config>
+ <name>APL</name>
+ <alias>apl</alias>
+ <filename>*.apl</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[⍝#].*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\'((\'\')|[^\'])*\'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern=""(("")|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[⋄◇()]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[\[\];]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="[A-Za-zΔ∆⍙_][A-Za-zΔ∆⍙_¯0-9]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[\.\\/⌿⍀¨⍣⍨⍠⍤∘⍥@⌺⌶⍢]">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗⊆⍸]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="⍬">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="[⎕⍞]">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="[←→]">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="[⍺⍵⍶⍹∇:]">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="KeywordType"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,130 @@
+<lexer>
+ <config>
+ <name>AppleScript</name>
+ <alias>applescript</alias>
+ <filename>*.applescript</filename>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="¬\n">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="'s\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(--|#).*?$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\(\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="[(){}!,.:]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(«)([^»]+)(»)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameBuiltin"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b((?:considering|ignoring)\s*)(application responses|case|diacriticals|hyphens|numeric strings|punctuation|white space)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="NameBuiltin"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b(and|or|is equal|equals|(is )?equal to|is not|isn't|isn't equal( to)?|is not equal( to)?|doesn't equal|does not equal|(is )?greater than|comes after|is not less than or equal( to)?|isn't less than or equal( to)?|(is )?less than|comes before|is not greater than or equal( to)?|isn't greater than or equal( to)?|(is )?greater than or equal( to)?|is not less than|isn't less than|does not come before|doesn't come before|(is )?less than or equal( to)?|is not greater than|isn't greater than|does not come after|doesn't come after|starts? with|begins? with|ends? with|contains?|does not contain|doesn't contain|is in|is contained by|is not in|is not contained by|isn't contained by|div|mod|not|(a )?(ref( to)?|reference to)|is|does)\b">
+ <token type="OperatorWord"/>
+ </rule>
@@ -0,0 +1,174 @@
+<lexer>
+ <config>
+ <name>ArangoDB AQL</name>
+ <alias>aql</alias>
+ <filename>*.aql</filename>
+ <mime_type>text/x-aql</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <dot_all>true</dot_all>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="comments-and-whitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comment"/>
+ </rule>
+ </state>
+ <state name="multiline-comment">
+ <rule pattern="[^*]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\*">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="double-quote">
+ <rule pattern="\\.">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[^"\\]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="single-quote">
+ <rule pattern="\\.">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="[^'\\]+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="backtick">
+ <rule pattern="\\.">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[^`\\]+">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="`">
+ <token type="Name"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="forwardtick">
+ <rule pattern="\\.">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[^´\\]+">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="´">
+ <token type="Name"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="identifier">
+ <rule pattern="(?:\$?|_+)[a-z]+[_a-z0-9]*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="`">
+ <token type="Name"/>
+ <push state="backtick"/>
+ </rule>
+ <rule pattern="´">
+ <token type="Name"/>
+ <push state="forwardtick"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="comments-and-whitespace"/>
+ </rule>
+ <rule pattern="0b[01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0x[0-9a-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(?:0|[1-9][0-9]*)(?![\.e])">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(?:(?:0|[1-9][0-9]*)(?:\.[0-9]+)?|\.[0-9]+)(?:e[\-\+]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="@@(?:_+[a-z0-9]+[a-z0-9_]*|[a-z0-9][a-z0-9_]*)">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="@(?:_+[a-z0-9]+[a-z0-9_]*|[a-z0-9][a-z0-9_]*)">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="=~|!~|[=!<>]=?|[%?:/*+-]|\.\.|&&|\|\|">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[.,(){}\[\]]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[a-zA-Z0-9][a-zA-Z0-9_]*(?:::[a-zA-Z0-9_]+)+(?=\s*\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(WITH)(\s+)(COUNT)(\s+)(INTO)\b">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="KeywordPseudo"/>
+ <token type="Text"/>
+ <token type="KeywordReserved"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?:KEEP|PRUNE|SEARCH|TO)\b">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="OPTIONS(?=\s*\{)">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="(?:AGGREGATE|ALL|ALL_SHORTEST_PATHS|AND|ANY|ASC|AT LEAST|COLLECT|DESC|DISTINCT|FILTER|FOR|GRAPH|IN|INBOUND|INSERT|INTO|K_PATHS|K_SHORTEST_PATHS|LIKE|LIMIT|NONE|NOT|OR|OUTBOUND|REMOVE|REPLACE|RETURN|SHORTEST_PATH|SORT|UPDATE|UPSERT|WITH|WINDOW)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="LET\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(?:true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(?-i)(?:CURRENT|NEW|OLD)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
@@ -0,0 +1,309 @@
+<lexer>
+ <config>
+ <name>Arduino</name>
+ <alias>arduino</alias>
+ <filename>*.ino</filename>
+ <mime_type>text/x-arduino</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="whitespace">
+ <rule pattern="^#if\s+0">
+ <token type="CommentPreproc"/>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^#">
+ <token type="CommentPreproc"/>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(\n|[\w\W]*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="macro">
+ <rule pattern="(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="CommentPreprocFile"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^/\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="/[*](.|\n)*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(?<=\\)\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="statements">
+ <rule pattern="(reinterpret_cast|static_assert|dynamic_cast|thread_local|static_cast|const_cast|protected|constexpr|namespace|restrict|noexcept|override|operator|typename|template|explicit|decltype|nullptr|private|alignof|virtual|mutable|alignas|typeid|friend|throws|export|public|delete|final|using|throw|catch|this|try|new)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="char(16_t|32_t)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(class)\b">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ <token type="LiteralStringDelimiter"/>
+ <token type="LiteralStringDelimiter"/>
+ <token type="LiteralString"/>
+ <token type="LiteralStringDelimiter"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(u8|u|U)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(L?)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+[LlUu]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[0-7]+[LlUu]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="\d+[LlUu]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[()\[\],.]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(restricted|volatile|continue|register|default|typedef|struct|extern|switch|sizeof|static|return|union|while|const|break|goto|enum|else|case|auto|for|asm|if|do)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(_Bool|_Complex|_Imaginary|array|atomic_bool|atomic_char|atomic_int|atomic_llong|atomic_long|atomic_schar|atomic_short|atomic_uchar|atomic_uint|atomic_ullong|atomic_ulong|atomic_ushort|auto|bool|boolean|BooleanVariables|Byte|byte|Char|char|char16_t|char32_t|class|complex|Const|const|const_cast|delete|double|dynamic_cast|enum|explicit|extern|Float|float|friend|inline|Int|int|int16_t|int32_t|int64_t|int8_t|Long|long|new|NULL|null|operator|private|PROGMEM|protected|public|register|reinterpret_cast|short|signed|sizeof|Static|static|static_cast|String|struct|typedef|uint16_t|uint32_t|uint64_t|uint8_t|union|unsigned|virtual|Void|void|Volatile|volatile|word)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(and|final|If|Loop|loop|not|or|override|setup|Setup|throw|try|xor)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(ANALOG_MESSAGE|BIN|CHANGE|DEC|DEFAULT|DIGITAL_MESSAGE|EXTERNAL|FALLING|FIRMATA_STRING|HALF_PI|HEX|HIGH|INPUT|INPUT_PULLUP|INTERNAL|INTERNAL1V1|INTERNAL1V1|INTERNAL2V56|INTERNAL2V56|LED_BUILTIN|LED_BUILTIN_RX|LED_BUILTIN_TX|LOW|LSBFIRST|MSBFIRST|OCT|OUTPUT|PI|REPORT_ANALOG|REPORT_DIGITAL|RISING|SET_PIN_MODE|SYSEX_START|SYSTEM_RESET|TWO_PI)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(boolean|const|byte|word|string|String|array)\b">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(Keyboard|KeyboardController|MouseController|SoftwareSerial|EthernetServer|EthernetClient|LiquidCrystal|RobotControl|GSMVoiceCall|EthernetUDP|EsploraTFT|HttpClient|RobotMotor|WiFiClient|GSMScanner|FileSystem|Scheduler|GSMServer|YunClient|YunServer|IPAddress|GSMClient|GSMModem|Keyboard|Ethernet|Console|GSMBand|Esplora|Stepper|Process|WiFiUDP|GSM_SMS|Mailbox|USBHost|Firmata|PImage|Client|Server|GSMPIN|FileIO|Bridge|Serial|EEPROM|Stream|Mouse|Audio|Servo|File|Task|GPRS|WiFi|Wire|TFT|GSM|SPI|SD)\b">
+ <token type="NameClass"/>
+ </rule>
@@ -0,0 +1,126 @@
+<lexer>
+ <config>
+ <name>ArmAsm</name>
+ <alias>armasm</alias>
+ <filename>*.s</filename>
+ <filename>*.S</filename>
+ <mime_type>text/x-armasm</mime_type>
+ <mime_type>text/x-asm</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="(\.\w+)([ \t]+\w+\s+?)?">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\w+)(:)(\s+\.\w+\s+)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="Punctuation"/>
+ <token type="KeywordNamespace"/>
+ </bygroups>
+ <push state="literal"/>
+ </rule>
+ <rule pattern="(\w+)(:)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="svc\s+\w+">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="[a-zA-Z]+">
+ <token type="Text"/>
+ <push state="opcode"/>
+ </rule>
+ </state>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[@;].*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="literal">
+ <rule pattern="0b[01]+">
+ <token type="LiteralNumberBin"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="0x\w{1,8}">
+ <token type="LiteralNumberHex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="0\d+">
+ <token type="LiteralNumberOct"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\d+?\.\d+?">
+ <token type="LiteralNumberFloat"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(")(.+)(")">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="LiteralStringDouble"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(')(.{1}|\\.{1})(')">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="LiteralStringChar"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="opcode">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(@|;).*\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(\s+|,)">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[rapcfxwbhsdqv]\d{1,2}">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="=0x\w+">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(=)(\w+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ </rule>
+ <rule pattern="#">
+ <token type="Text"/>
+ <push state="literal"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,165 @@
+<lexer>
+ <config>
+ <name>ATL</name>
+ <alias>atl</alias>
+ <filename>*.atl</filename>
+ <mime_type>text/x-atl</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(--.*?)(\n)">
+ <bygroups>
+ <token type="CommentSingle" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ </rule>
+ <rule pattern="(and|distinct|endif|else|for|foreach|if|implies|in|let|not|or|self|super|then|thisModule|xor)\b">
+ <token type="Keyword" />
+ </rule>
+ <rule pattern="(OclUndefined|true|false|#\w+)\b">
+ <token type="KeywordConstant" />
+ </rule>
+ <rule pattern="(module|query|library|create|from|to|uses)\b">
+ <token type="KeywordNamespace" />
+ </rule>
+ <rule pattern="(do)(\s*)({)">
+ <bygroups>
+ <token type="KeywordNamespace" />
+ <token type="TextWhitespace" />
+ <token type="Punctuation" />
+ </bygroups>
+ </rule>
+ <rule pattern="(abstract|endpoint|entrypoint|lazy|unique)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ </rule>
+ <rule pattern="(rule)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ </rule>
+ <rule pattern="(helper)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ </rule>
+ <rule pattern="(context)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ </rule>
+ <rule pattern="(def)(\s*)(:)(\s*)">
+ <bygroups>
+ <token type="KeywordNamespace" />
+ <token type="TextWhitespace" />
+ <token type="Punctuation" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ </rule>
+ <rule pattern="(Bag|Boolean|Integer|OrderedSet|Real|Sequence|Set|String|Tuple)">
+ <token type="KeywordType" />
+ </rule>
+ <rule pattern="(\w+)(\s*)(<-|<:=)">
+ <bygroups>
+ <token type="NameNamespace" />
+ <token type="TextWhitespace" />
+ <token type="Punctuation" />
+ </bygroups>
+ </rule>
+ <rule pattern="#"">
+ <token type="KeywordConstant" />
+ <push state="quotedenumliteral" />
+ </rule>
+ <rule pattern=""">
+ <token type="NameNamespace" />
+ <push state="quotedname" />
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="TextWhitespace" />
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString" />
+ <push state="string" />
+ </rule>
+ <rule
+ pattern="[0-9]*\.[0-9]+">
+ <token type="LiteralNumberFloat" />
+ </rule>
+ <rule pattern="0|[1-9][0-9]*">
+ <token type="LiteralNumberInteger" />
+ </rule>
+ <rule pattern="[*<>+=/-]">
+ <token type="Operator" />
+ </rule>
+ <rule pattern="([{}();:.,!|]|->)">
+ <token type="Punctuation" />
+ </rule>
+ <rule pattern="\n">
+ <token type="TextWhitespace" />
+ </rule>
+ <rule pattern="\w+">
+ <token type="NameNamespace" />
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="[^\\']+">
+ <token type="LiteralString" />
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralString" />
+ </rule>
+ <rule pattern="\\'">
+ <token type="LiteralString" />
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString" />
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString" />
+ <pop depth="1" />
+ </rule>
+ </state>
+ <state name="quotedname">
+ <rule pattern="[^\\"]+">
+ <token type="NameNamespace" />
+ </rule>
+ <rule pattern="\\\\">
+ <token type="NameNamespace" />
+ </rule>
+ <rule pattern="\\"">
+ <token type="NameNamespace" />
+ </rule>
+ <rule pattern="\\">
+ <token type="NameNamespace" />
+ </rule>
+ <rule pattern=""">
+ <token type="NameNamespace" />
+ <pop depth="1" />
+ </rule>
+ </state>
+ <state name="quotedenumliteral">
+ <rule pattern="[^\\"]+">
+ <token type="KeywordConstant" />
+ </rule>
+ <rule pattern="\\\\">
+ <token type="KeywordConstant" />
+ </rule>
+ <rule pattern="\\"">
+ <token type="KeywordConstant" />
+ </rule>
+ <rule pattern="\\">
+ <token type="KeywordConstant" />
+ </rule>
+ <rule pattern=""">
+ <token type="KeywordConstant" />
+ <pop depth="1" />
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,78 @@
+
+<lexer>
+ <config>
+ <name>AutoHotkey</name>
+ <alias>autohotkey</alias>
+ <alias>ahk</alias>
+ <filename>*.ahk</filename>
+ <filename>*.ahkl</filename>
+ <mime_type>text/x-autohotkey</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^(\s*)(/\*)"><bygroups><token type="Text"/><token type="CommentMultiline"/></bygroups><push state="incomment"/></rule>
+ <rule pattern="^(\s*)(\()"><bygroups><token type="Text"/><token type="Generic"/></bygroups><push state="incontinuation"/></rule>
+ <rule pattern="\s+;.*?$"><token type="CommentSingle"/></rule>
+ <rule pattern="^;.*?$"><token type="CommentSingle"/></rule>
+ <rule pattern="[]{}(),;[]"><token type="Punctuation"/></rule>
+ <rule pattern="(in|is|and|or|not)\b"><token type="OperatorWord"/></rule>
+ <rule pattern="\%[a-zA-Z_#@$][\w#@$]*\%"><token type="NameVariable"/></rule>
+ <rule pattern="!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]"><token type="Operator"/></rule>
+ <rule><include state="commands"/></rule>
+ <rule><include state="labels"/></rule>
+ <rule><include state="builtInFunctions"/></rule>
+ <rule><include state="builtInVariables"/></rule>
+ <rule pattern="""><token type="LiteralString"/><combined state="stringescape" state="dqs"/></rule>
+ <rule><include state="numbers"/></rule>
+ <rule pattern="[a-zA-Z_#@$][\w#@$]*"><token type="Name"/></rule>
+ <rule pattern="\\|\'"><token type="Text"/></rule>
+ <rule pattern="\`([,%`abfnrtv\-+;])"><token type="LiteralStringEscape"/></rule>
+ <rule><include state="garbage"/></rule>
+ </state>
+ <state name="incomment">
+ <rule pattern="^\s*\*/"><token type="CommentMultiline"/><pop depth="1"/></rule>
+ <rule pattern="[^*]+"><token type="CommentMultiline"/></rule>
+ <rule pattern="\*"><token type="CommentMultiline"/></rule>
+ </state>
+ <state name="incontinuation">
+ <rule pattern="^\s*\)"><token type="Generic"/><pop depth="1"/></rule>
+ <rule pattern="[^)]"><token type="Generic"/></rule>
+ <rule pattern="[)]"><token type="Generic"/></rule>
+ </state>
+ <state name="commands">
@@ -0,0 +1,70 @@
+
+<lexer>
+ <config>
+ <name>AutoIt</name>
+ <alias>autoit</alias>
+ <filename>*.au3</filename>
+ <mime_type>text/x-autoit</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern=";.*\n"><token type="CommentSingle"/></rule>
+ <rule pattern="(#comments-start|#cs)(.|\n)*?(#comments-end|#ce)"><token type="CommentMultiline"/></rule>
+ <rule pattern="[\[\]{}(),;]"><token type="Punctuation"/></rule>
+ <rule pattern="(and|or|not)\b"><token type="OperatorWord"/></rule>
+ <rule pattern="[$|@][a-zA-Z_]\w*"><token type="NameVariable"/></rule>
+ <rule pattern="!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]"><token type="Operator"/></rule>
+ <rule><include state="commands"/></rule>
+ <rule><include state="labels"/></rule>
+ <rule><include state="builtInFunctions"/></rule>
+ <rule><include state="builtInMarcros"/></rule>
+ <rule pattern="""><token type="LiteralString"/><combined state="stringescape" state="dqs"/></rule>
+ <rule pattern="'"><token type="LiteralString"/><push state="sqs"/></rule>
+ <rule><include state="numbers"/></rule>
+ <rule pattern="[a-zA-Z_#@$][\w#@$]*"><token type="Name"/></rule>
+ <rule pattern="\\|\'"><token type="Text"/></rule>
+ <rule pattern="\`([,%`abfnrtv\-+;])"><token type="LiteralStringEscape"/></rule>
+ <rule pattern="_\n"><token type="Text"/></rule>
+ <rule><include state="garbage"/></rule>
+ </state>
+ <state name="commands">
+ <rule pattern="(?i)(\s*)(#include-once|#include|#endregion|#forcedef|#forceref|#region|and|byref|case|continueloop|dim|do|else|elseif|endfunc|endif|endselect|exit|exitloop|for|func|global|if|local|next|not|or|return|select|step|then|to|until|wend|while|exit)\b"><bygroups><token type="Text"/><token type="NameBuiltin"/></bygroups></rule>
+ </state>
+ <state name="builtInFunctions">
@@ -0,0 +1,95 @@
+<lexer>
+ <config>
+ <name>Awk</name>
+ <alias>awk</alias>
+ <alias>gawk</alias>
+ <alias>mawk</alias>
+ <alias>nawk</alias>
+ <filename>*.awk</filename>
+ <mime_type>application/x-awk</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^(?=\s|/)">
+ <token type="Text"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="\+\+|--|\|\||&&|in\b|\$|!?~|\|&|(\*\*|[-<>+*%\^/!=|])=?">
+ <token type="Operator"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(break|continue|do|while|exit|for|if|else|return|switch|case|default)\b">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="function\b">
+ <token type="KeywordDeclaration"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|length|match|split|patsplit|sprintf|sub|substr|tolower|toupper|close|fflush|getline|next(file)|print|printf|strftime|systime|mktime|delete|system|strtonum|and|compl|lshift|or|rshift|asorti?|isarray|bindtextdomain|dcn?gettext|@(include|load|namespace))\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(ARGC|ARGIND|ARGV|BEGIN(FILE)?|BINMODE|CONVFMT|ENVIRON|END(FILE)?|ERRNO|FIELDWIDTHS|FILENAME|FNR|FPAT|FS|IGNORECASE|LINT|NF|NR|OFMT|OFS|ORS|PROCINFO|RLENGTH|RS|RSTART|RT|SUBSEP|TEXTDOMAIN)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[@$a-zA-Z_]\w*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="slashstartsregex">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/\B">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=/)">
+ <token type="Text"/>
+ <push state="#pop" state="badregex"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="badregex">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,97 @@
+<lexer>
+ <config>
+ <name>Ballerina</name>
+ <alias>ballerina</alias>
+ <filename>*.bal</filename>
+ <mime_type>text/x-ballerina</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="(break|catch|continue|done|else|finally|foreach|forever|fork|if|lock|match|return|throw|transaction|try|while)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="@[^\W\d][\w.]*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="(annotation|bind|but|endpoint|error|function|object|private|public|returns|service|type|var|with|worker)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(boolean|byte|decimal|float|int|json|map|nil|record|string|table|xml)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(import)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="(\.)((?:[^\W\d]|\$)[\w$]*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameAttribute"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^\s*([^\W\d]|\$)[\w$]*:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="([^\W\d]|\$)[\w$]*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[bB][01][01_]*[lL]?">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[0-7_]+[lL]?">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0|[1-9][0-9_]*[lL]?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[~^*!%&\[\](){}<>|+=:;,./?-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="[\w.]+">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,220 @@
+<lexer>
+ <config>
+ <name>Bash</name>
+ <alias>bash</alias>
+ <alias>sh</alias>
+ <alias>ksh</alias>
+ <alias>zsh</alias>
+ <alias>shell</alias>
+ <filename>*.sh</filename>
+ <filename>*.ksh</filename>
+ <filename>*.bash</filename>
+ <filename>*.ebuild</filename>
+ <filename>*.eclass</filename>
+ <filename>.env</filename>
+ <filename>*.env</filename>
+ <filename>*.exheres-0</filename>
+ <filename>*.exlib</filename>
+ <filename>*.zsh</filename>
+ <filename>*.zshrc</filename>
+ <filename>.bashrc</filename>
+ <filename>bashrc</filename>
+ <filename>.bash_*</filename>
+ <filename>bash_*</filename>
+ <filename>zshrc</filename>
+ <filename>.zshrc</filename>
+ <filename>PKGBUILD</filename>
+ <mime_type>application/x-sh</mime_type>
+ <mime_type>application/x-shellscript</mime_type>
+ <analyse first="true" >
+ <regex pattern="(?m)^#!.*/bin/(?:env |)(?:bash|zsh|sh|ksh)" score="1.0" />
+ </analyse>
+ </config>
+ <rules>
+ <state name="data">
+ <rule pattern="(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="(?s)'.*?'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="&">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\|">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\d+(?= |$)">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[^=\s\[\]{}()$"\'`\\<&|;]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="<">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule>
+ <include state="interp"/>
+ </rule>
+ </state>
+ <state name="interp">
+ <rule pattern="\$\(\(">
+ <token type="Keyword"/>
+ <push state="math"/>
+ </rule>
+ <rule pattern="\$\(">
+ <token type="Keyword"/>
+ <push state="paren"/>
+ </rule>
+ <rule pattern="\$\{#?">
+ <token type="LiteralStringInterpol"/>
+ <push state="curly"/>
+ </rule>
+ <rule pattern="\$[a-zA-Z_]\w*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\$(?:\d+|[#$?!_*@-])">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="paren">
+ <rule pattern="\)">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="math">
+ <rule pattern="\)\)">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[-+*/%^|&]|\*\*|\|\|">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\d+#\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+#(?! )">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="backticks">
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="basic"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <push state="backticks"/>
+ </rule>
+ <rule>
+ <include state="data"/>
+ </rule>
+ <rule>
+ <include state="interp"/>
+ </rule>
+ </state>
+ <state name="basic">
+ <rule pattern="\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\s)`])">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="\A#!.+\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="#.*(\S|$)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\\[\w\W]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="(\b\w+)(\s*)(\+?=)">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[\[\]{}()=]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="<<<">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="&&|\|\|">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ <state name="curly">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=":-">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[^}:"\'`$\\]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,25 @@
+<lexer>
+ <config>
+ <name>Bash Session</name>
+ <alias>bash-session</alias>
+ <alias>console</alias>
+ <alias>shell-session</alias>
+ <filename>*.sh-session</filename>
+ <mime_type>text/x-sh</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^((?:\[[^]]+@[^]]+\]\s?)?[#$%>])(\s*)(.*\n?)">
+ <bygroups>
+ <token type="GenericPrompt"/>
+ <token type="Text"/>
+ <using lexer="bash"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^.+\n?">
+ <token type="GenericOutput"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,660 @@
+<lexer>
+ <config>
+ <name>Batchfile</name>
+ <alias>bat</alias>
+ <alias>batch</alias>
+ <alias>dosbatch</alias>
+ <alias>winbatch</alias>
+ <filename>*.bat</filename>
+ <filename>*.cmd</filename>
+ <mime_type>application/x-dos-batch</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="arithmetic">
+ <rule pattern="0[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0x[\da-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[(),]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="([=+\-*/!~]|%|\^\^)+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[\w\W])+">
+ <usingself state="variable"/>
+ </rule>
+ <rule pattern="(?=[\x00|&])">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="follow"/>
+ </rule>
+ </state>
+ <state name="else?">
+ <rule pattern="(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)">
+ <usingself state="text"/>
+ </rule>
+ <rule pattern="else(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="sqstring">
+ <rule>
+ <include state="variable-or-escape"/>
+ </rule>
+ <rule pattern="[^%]+|%">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\)((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))">
+ <token type="Text"/>
+ <push state="follow"/>
+ </rule>
+ <rule pattern="(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)">
+ <usingself state="text"/>
+ </rule>
+ <rule>
+ <include state="redirect"/>
+ </rule>
+ <rule pattern="[\n\x1a]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="root/compound"/>
+ </rule>
+ <rule pattern="@+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ </bygroups>
+ <push state="follow"/>
+ </rule>
+ <rule pattern="(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|])*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ </bygroups>
+ <push state="follow"/>
+ </rule>
+ <rule pattern="(setlocal|endlocal|prompt|verify|rename|mklink|rmdir|shift|start|color|dpath|title|chdir|erase|pushd|ftype|break|pause|mkdir|assoc|date|path|time|popd|keys|exit|type|copy|echo|move|dir|del|ren|ver|cls|vol|rd|md|cd)(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])">
+ <token type="Keyword"/>
+ <push state="follow"/>
+ </rule>
+ <rule pattern="(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="call"/>
+ </rule>
+ <rule pattern="call(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <push state="for/f" state="for"/>
+ </rule>
+ <rule pattern="(for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <push state="for/l" state="for"/>
+ </rule>
+ <rule pattern="for(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])(?!\^)">
+ <token type="Keyword"/>
+ <push state="for2" state="for"/>
+ </rule>
+ <rule pattern="(goto(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="label"/>
+ </rule>
+ <rule pattern="(if(?:(?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ </bygroups>
+ <push state="(?" state="if"/>
+ </rule>
+ <rule pattern="rem(((?=\()|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])(?:(?:[^\n\x1a^]|\^[\n\x1a]?[\w\W])*))">
+ <token type="CommentSingle"/>
+ <push state="follow"/>
+ </rule>
+ <rule pattern="(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <push state="arithmetic"/>
+ </rule>
+ <rule pattern="(set(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <usingself state="variable"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="follow"/>
+ </rule>
+ <rule>
+ <push state="follow"/>
+ </rule>
+ </state>
+ <state name="follow">
+ <rule pattern="((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))(.*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameLabel"/>
+ <token type="CommentSingle"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="redirect"/>
+ </rule>
+ <rule pattern="(?=[\n\x1a])">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\|\|?|&&?">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="text"/>
+ </rule>
+ </state>
+ <state name="bqstring">
+ <rule>
+ <include state="variable-or-escape"/>
+ </rule>
+ <rule pattern="[^%]+|%">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ </state>
+ <state name="for2">
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(do(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))">
+ <bygroups>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\n\x1a]+">
+ <token type="Text"/>
+ </rule>
+ <rule>
+ <include state="follow"/>
+ </rule>
+ </state>
+ <state name="label/compound">
+ <rule pattern="(?=\))">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[^)]|[^"%^\n\x1a&<>|)])*)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="CommentSingle"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="for">
+ <rule pattern="((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(in)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\()">
+ <bygroups>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="follow"/>
+ </rule>
+ </state>
+ <state name="redirect/compound">
+ <rule pattern="((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)">
+ <bygroups>
+ <token type="LiteralNumberInteger"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="LiteralNumberInteger"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?<!\^[\n\x1a])\d)?)(>>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+))+))">
+ <bygroups>
+ <token type="LiteralNumberInteger"/>
+ <token type="Punctuation"/>
+ <usingself state="text"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="if">
+ <rule pattern="((?:cmdextversion|errorlevel)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(\d+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="LiteralNumberInteger"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(defined(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <usingself state="variable"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(exist(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="((?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))">
+ <bygroups>
+ <usingself state="arithmetic"/>
+ <token type="OperatorWord"/>
+ <usingself state="arithmetic"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)">
+ <usingself state="text"/>
+ <push state="#pop" state="if2"/>
+ </rule>
+ </state>
+ <state name="root/compound">
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:))">
+ <token type="Text"/>
+ <push state="follow/compound"/>
+ </rule>
+ <rule pattern="(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)">
+ <usingself state="text"/>
+ </rule>
+ <rule>
+ <include state="redirect/compound"/>
+ </rule>
+ <rule pattern="[\n\x1a]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="root/compound"/>
+ </rule>
+ <rule pattern="@+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="((?:for|if|rem)(?:(?=(?:\^[\n\x1a]?)?/)|(?:(?!\^)|(?<=m))(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0)])+)?(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ </bygroups>
+ <push state="follow/compound"/>
+ </rule>
+ <rule pattern="(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*(?:\^[\n\x1a]?)?/(?:\^[\n\x1a]?)?\?(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"%\n\x1a&<>|)])*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ </bygroups>
+ <push state="follow/compound"/>
+ </rule>
+ <rule pattern="(setlocal|endlocal|prompt|verify|rename|mklink|rmdir|shift|start|color|dpath|title|chdir|erase|pushd|ftype|break|pause|mkdir|assoc|date|path|time|popd|keys|exit|type|copy|echo|move|dir|del|ren|ver|cls|vol|rd|md|cd)(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))">
+ <token type="Keyword"/>
+ <push state="follow/compound"/>
+ </rule>
+ <rule pattern="(call)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="call/compound"/>
+ </rule>
+ <rule pattern="call(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/f(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <push state="for/f" state="for"/>
+ </rule>
+ <rule pattern="(for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))(/l(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <push state="for/l" state="for"/>
+ </rule>
+ <rule pattern="for(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a]))(?!\^)">
+ <token type="Keyword"/>
+ <push state="for2" state="for"/>
+ </rule>
+ <rule pattern="(goto(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(:?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="label/compound"/>
+ </rule>
+ <rule pattern="(if(?:(?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?!\^))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:/i(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)((?:not(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))?)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ </bygroups>
+ <push state="(?" state="if"/>
+ </rule>
+ <rule pattern="rem(((?=\()|(?:(?=\))|(?=\^?[\t\v\f\r ,;=\xa0]|[&<>|\n\x1a])))(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+)?.*|(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(]))(?:(?:[^\n\x1a^)]|\^[\n\x1a]?[^)])*))">
+ <token type="CommentSingle"/>
+ <push state="follow/compound"/>
+ </rule>
+ <rule pattern="(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)(/a)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <push state="arithmetic/compound"/>
+ </rule>
+ <rule pattern="(set(?:(?=\))|(?=(?:\^[\n\x1a]?)?[\t\v\f\r ,;=\xa0+./:[\\\]]|[\n\x1a&<>|(])))((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:/p)?)((?:(?:\^[\n\x1a]?)?[^\S\n])*)((?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|^=)]|\^[\n\x1a]?[^"=])+)?)((?:(?:\^[\n\x1a]?)?=)?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <token type="Keyword"/>
+ <usingself state="text"/>
+ <usingself state="variable"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="follow/compound"/>
+ </rule>
+ <rule>
+ <push state="follow/compound"/>
+ </rule>
+ </state>
+ <state name="follow/compound">
+ <rule pattern="(?=\))">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="((?:(?<=^[^:])|^[^:]?)[\t\v\f\r ,;=\xa0]*)(:)([\t\v\f\r ,;=\xa0]*)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))(.*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameLabel"/>
+ <token type="CommentSingle"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="redirect/compound"/>
+ </rule>
+ <rule pattern="(?=[\n\x1a])">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\|\|?|&&?">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="text"/>
+ </rule>
+ </state>
+ <state name="text">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule>
+ <include state="variable-or-escape"/>
+ </rule>
+ <rule pattern="[^"%^\n\x1a&<>|\t\v\f\r ,;=\xa0\d)]+|.">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="redirect">
+ <rule pattern="((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])\d)?)(>>?&|<&)([\n\x1a\t\v\f\r ,;=\xa0]*)(\d)">
+ <bygroups>
+ <token type="LiteralNumberInteger"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="LiteralNumberInteger"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((?:(?<=[\n\x1a\t\v\f\r ,;=\xa0])(?<!\^[\n\x1a])\d)?)(>>?|<)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))">
+ <bygroups>
+ <token type="LiteralNumberInteger"/>
+ <token type="Punctuation"/>
+ <usingself state="text"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="label">
+ <rule pattern="((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*)?)((?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|\^[\n\x1a]?[\w\W]|[^"%^\n\x1a&<>|])*)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="CommentSingle"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="arithmetic/compound">
+ <rule pattern="(?=\))">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="0[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0x[\da-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[(),]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="([=+\-*/!~]|%|\^\^)+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="((?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(\^[\n\x1a]?)?[^()=+\-*/!~%^"\n\x1a&<>|\t\v\f\r ,;=\xa0]|\^[\n\x1a\t\v\f\r ,;=\xa0]?[^)])+">
+ <usingself state="variable"/>
+ </rule>
+ <rule pattern="(?=[\x00|&])">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="follow"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\^!|%%">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^"%^\n\x1a]+|[%^]">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="variable">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule>
+ <include state="variable-or-escape"/>
+ </rule>
+ <rule pattern="[^"%^\n\x1a]+|.">
+ <token type="NameVariable"/>
+ </rule>
+ </state>
+ <state name="call/compound">
+ <rule pattern="(?=\))">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^)]|\^[\n\x1a]?[^)])*))">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="for/f">
+ <rule pattern="(")((?:(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[^"])*?")([\n\x1a\t\v\f\r ,;=\xa0]*)(\))">
+ <bygroups>
+ <token type="LiteralStringDouble"/>
+ <usingself state="string"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="#pop" state="for2" state="string"/>
+ </rule>
+ <rule pattern="('(?:%%|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[\w\W])*?')([\n\x1a\t\v\f\r ,;=\xa0]*)(\))">
+ <bygroups>
+ <usingself state="sqstring"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(`(?:%%|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|[\w\W])*?`)([\n\x1a\t\v\f\r ,;=\xa0]*)(\))">
+ <bygroups>
+ <usingself state="bqstring"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="for2"/>
+ </rule>
+ </state>
+ <state name="for/l">
+ <rule pattern="-?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule>
+ <include state="for2"/>
+ </rule>
+ </state>
+ <state name="if2">
+ <rule pattern="((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?)(==)((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)?(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))">
+ <bygroups>
+ <usingself state="text"/>
+ <token type="Operator"/>
+ <usingself state="text"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+))((?:equ|geq|gtr|leq|lss|neq))((?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)(?:[&<>|]+|(?:(?:"[^\n\x1a"]*(?:"|(?=[\n\x1a])))|(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))|(?:(?:(?:\^[\n\x1a]?)?[^"\n\x1a&<>|\t\v\f\r ,;=\xa0])+))+))">
+ <bygroups>
+ <usingself state="text"/>
+ <token type="OperatorWord"/>
+ <usingself state="text"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="(?">
+ <rule pattern="(?:(?:(?:\^[\n\x1a])?[\t\v\f\r ,;=\xa0])+)">
+ <usingself state="text"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="#pop" state="else?" state="root/compound"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="call">
+ <rule pattern="(:?)((?:(?:[^\n\x1a&<>|\t\v\f\r ,;=\xa0+:^]|\^[\n\x1a]?[\w\W])*))">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="variable-or-escape">
+ <rule pattern="(?:(?:%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|[^%:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%\n\x1a^]|\^[^%\n\x1a])[^=\n\x1a]*=(?:[^%\n\x1a^]|\^[^%\n\x1a])*)?)?%))|(?:\^?![^!:\n\x1a]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^!\n\x1a^]|\^[^!\n\x1a])[^=\n\x1a]*=(?:[^!\n\x1a^]|\^[^!\n\x1a])*)?)?\^?!))">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="%%|\^[\n\x1a]?(\^!|[\w\W])">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,120 @@
+<lexer>
+ <config>
+ <name>Beef</name>
+ <alias>beef</alias>
+ <filename>*.bf</filename>
+ <mime_type>text/x-beef</mime_type>
+ <dot_all>true</dot_all>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^\s*\[.*?\]">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="///[^\n\r]*">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="//[^\n\r]*">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/[*].*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[~!%^&*()+=|\[\]:;,.<>/?-]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="@"(""|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\$@?"(""|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"\n])*["\n]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+[Ll]?|\d[_\d]*(\.\d*)?([eE][+-]?\d+)?[flFLdD]?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma|nullable)\b">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\b(extern)(\s+)(alias)\b">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(as|await|base|break|by|case|catch|checked|continue|default|delegate|else|event|finally|fixed|for|repeat|goto|if|in|init|is|let|lock|new|scope|on|out|params|readonly|ref|return|sizeof|stackalloc|switch|this|throw|try|typeof|unchecked|virtual|void|while|get|set|new|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(global)(::)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(abstract|async|const|enum|explicit|extern|implicit|internal|operator|override|partial|extension|private|protected|public|static|sealed|unsafe|volatile)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(bool|byte|char8|char16|char32|decimal|double|float|int|int8|int16|int32|int64|long|object|sbyte|short|string|uint|uint8|uint16|uint32|uint64|uint|let|var)\b\??">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(class|struct|record|interface)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="(namespace|using)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="namespace"/>
+ </rule>
+ <rule pattern="@?[_a-zA-Z]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="class">
+ <rule pattern="@?[_a-zA-Z]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="namespace">
+ <rule pattern="(?=\()">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(@?[_a-zA-Z]\w*|\.)+">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,152 @@
+<lexer>
+ <config>
+ <name>BibTeX</name>
+ <alias>bib</alias>
+ <alias>bibtex</alias>
+ <filename>*.bib</filename>
+ <mime_type>text/x-bibtex</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="closing-brace">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="[})]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="braced-string">
+ <rule pattern="\{">
+ <token type="LiteralString"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\{\}]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="value">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="[a-z_@!$&*+\-./:;<>?\[\\\]^`|~][\w@!$&*+\-./:;<>?\[\\\]^`|~]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="quoted-string"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="LiteralString"/>
+ <push state="braced-string"/>
+ </rule>
+ <rule pattern="[\d]+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="#">
+ <token type="Punctuation"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="quoted-string">
+ <rule pattern="\{">
+ <token type="LiteralString"/>
+ <push state="braced-string"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\{\"]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="@comment">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="@preamble">
+ <token type="NameClass"/>
+ <push state="closing-brace" state="value" state="opening-brace"/>
+ </rule>
+ <rule pattern="@string">
+ <token type="NameClass"/>
+ <push state="closing-brace" state="field" state="opening-brace"/>
+ </rule>
+ <rule pattern="@[a-z_@!$&*+\-./:;<>?\[\\\]^`|~][\w@!$&*+\-./:;<>?\[\\\]^`|~]*">
+ <token type="NameClass"/>
+ <push state="closing-brace" state="command-body" state="opening-brace"/>
+ </rule>
+ <rule pattern=".+">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="command-body">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="[^\s\,\}]+">
+ <token type="NameLabel"/>
+ <push state="#pop" state="fields"/>
+ </rule>
+ </state>
+ <state name="fields">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ <push state="field"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="=">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="=">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="field">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="[a-z_@!$&*+\-./:;<>?\[\\\]^`|~][\w@!$&*+\-./:;<>?\[\\\]^`|~]*">
+ <token type="NameAttribute"/>
+ <push state="value" state="="/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="opening-brace">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="[{(]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,84 @@
+<lexer>
+ <config>
+ <name>Bicep</name>
+ <alias>bicep</alias>
+ <filename>*.bicep</filename>
+ </config>
+ <rules>
+ <state name="interp">
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interp-inside"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[^'\\$]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="interp-inside">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="commentsandwhitespace">
+ <rule pattern="//[^\n\r]+">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="'''.*?'''">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="interp"/>
+ </rule>
+ <rule pattern="#[\w-]+\b">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="[\w_]+(?=\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="\b(metadata|targetScope|resource|module|param|var|output|for|in|if|existing|import|as|type|with|using|func|assert)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="\b(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(>=|>|<=|<|==|!=|=~|!~|::|&&|\?\?|!|-|%|\*|\/|\+)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(\(|\)|\[|\]|\.|:|\?|{|}|@|,|\||=>|=)">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[\w_]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,141 @@
+<lexer>
+ <config>
+ <name>BlitzBasic</name>
+ <alias>blitzbasic</alias>
+ <alias>b3d</alias>
+ <alias>bplus</alias>
+ <filename>*.bb</filename>
+ <filename>*.decls</filename>
+ <mime_type>text/x-bb</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern="""">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""C?">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^"]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="[0-9]+\.[0-9]*(?!\.)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\.[0-9]+(?!\.)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\$[0-9a-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\%[10]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="\b(Before|Handle|After|First|Float|Last|Sgn|Abs|Not|And|Int|Mod|Str|Sar|Shr|Shl|Or)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="([+\-*/~=<>^])">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[(),:\[\]\\]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\.([ \t]*)([a-z]\w*)">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="\b(New)\b([ \t]+)([a-z]\w*)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(Gosub|Goto)\b([ \t]+)([a-z]\w*)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(Object)\b([ \t]*)([.])([ \t]*)([a-z]\w*)\b">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?\b([ \t]*)(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(Function)\b([ \t]+)([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(Type)([ \t]+)([a-z]\w*)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(Pi|True|False|Null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="\b(Local|Global|Const|Field|Dim)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="\b(Function|Restore|Default|Forever|Include|Return|Repeat|ElseIf|Delete|Insert|Select|EndIf|Until|While|Gosub|Type|Goto|Else|Data|Next|Step|Each|Case|Wend|Exit|Read|Then|For|New|Asc|Len|Chr|End|To|If)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="([a-z]\w*)(?:([ \t]*)(@{1,2}|[#$%])|([ \t]*)([.])([ \t]*)(?:([a-z]\w*)))?">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,28 @@
+<lexer>
+ <config>
+ <name>BNF</name>
+ <alias>bnf</alias>
+ <filename>*.bnf</filename>
+ <mime_type>text/x-bnf</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(<)([ -;=?-~]+)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameClass"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="::=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[^<>:]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,83 @@
+<lexer>
+ <config>
+ <name>BQN</name>
+ <alias>bqn</alias>
+ <filename>*.bqn</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="\A#!.+$">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern=""(?:[^"]|"")*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="[⟨⟩\[\]‿]">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="[()]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[:;?]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[⋄,]">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="[←⇐↩→]">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="'.'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="[˙˜˘¨⌜⁼´˝`]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[∘○⊸⟜⌾⊘◶⎉⚇⍟⎊]">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="[𝔽𝔾𝕎𝕏𝕊+\-×÷⋆√⌊⌈|¬∧∨<>≠=≤≥≡≢⊣⊢⥊∾≍⋈↑↓↕«»⌽⍉/⍋⍒⊏⊑⊐⊒∊⍷⊔!⍕⍎]">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="[𝕗𝕘𝕨𝕩𝕤]">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="·">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="@">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="\d+(?:\.\d+)?[eE]¯?\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[¯∞π]?(?:\d*\.?\b\d+(?:e[+¯]?\d+|E[+¯]?\d+)?|¯|∞|π)(?:j¯?(?:(?:\d+(?:\.\d+)?|\.\d+)(?:e[+¯]?\d+|E[+¯]?\d+)?|¯|∞|π))?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="(•?[a-z][A-Z_a-z0-9π∞¯]*|𝕣)">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="•?[A-Z][A-Z_a-z0-9π∞¯]*">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(•?_[A-Za-z][A-Z_a-z0-9π∞¯]*|_𝕣)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(•?_[A-Za-z][A-Z_a-z0-9π∞¯]*_|_𝕣_)">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,51 @@
+<lexer>
+ <config>
+ <name>Brainfuck</name>
+ <alias>brainfuck</alias>
+ <alias>bf</alias>
+ <filename>*.bf</filename>
+ <filename>*.b</filename>
+ <mime_type>application/x-brainfuck</mime_type>
+ </config>
+ <rules>
+ <state name="common">
+ <rule pattern="[.,]+">
+ <token type="NameTag"/>
+ </rule>
+ <rule pattern="[+-]+">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[<>]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[^.,+\-<>\[\]]+">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\[">
+ <token type="Keyword"/>
+ <push state="loop"/>
+ </rule>
+ <rule pattern="\]">
+ <token type="Error"/>
+ </rule>
+ <rule>
+ <include state="common"/>
+ </rule>
+ </state>
+ <state name="loop">
+ <rule pattern="\[">
+ <token type="Keyword"/>
+ <push/>
+ </rule>
+ <rule pattern="\]">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="common"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,121 @@
+<lexer>
+ <config>
+ <name>C#</name>
+ <alias>csharp</alias>
+ <alias>c#</alias>
+ <filename>*.cs</filename>
+ <mime_type>text/x-csharp</mime_type>
+ <dot_all>true</dot_all>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^\s*\[.*?\]">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="///[^\n\r]*">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="//[^\n\r]*">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/[*].*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[~!%^&*()+=|\[\]:;,.<>/?-]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="@"(""|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\$@?"(""|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"\n])*["\n]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+[Ll]?|\d[_\d]*(\.\d*)?([eE][+-]?\d+)?[flFLdD]?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma|nullable)\b">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\b(extern)(\s+)(alias)\b">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(as|await|base|break|by|case|catch|checked|continue|default|delegate|do|else|event|finally|fixed|for|foreach|goto|if|in|init|is|let|lock|new|on|out|params|readonly|ref|return|sizeof|stackalloc|switch|this|throw|try|typeof|unchecked|virtual|void|while|get|set|new|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(global)(::)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(abstract|async|const|enum|explicit|extern|implicit|internal|operator|override|partial|private|protected|public|static|sealed|unsafe|volatile)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(bool|byte|char|decimal|double|dynamic|float|int|long|object|sbyte|short|string|uint|ulong|ushort|var)\b\??">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(class|struct|record|interface)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="(namespace|using)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="namespace"/>
+ </rule>
+ <rule pattern="@?[_a-zA-Z]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="class">
+ <rule pattern="@?[_a-zA-Z]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="namespace">
+ <rule pattern="(?=\()">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(@?[_a-zA-Z]\w*|\.)+">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,331 @@
+<lexer>
+ <config>
+ <name>C++</name>
+ <alias>cpp</alias>
+ <alias>c++</alias>
+ <filename>*.cpp</filename>
+ <filename>*.hpp</filename>
+ <filename>*.c++</filename>
+ <filename>*.h++</filename>
+ <filename>*.cc</filename>
+ <filename>*.hh</filename>
+ <filename>*.cxx</filename>
+ <filename>*.hxx</filename>
+ <filename>*.C</filename>
+ <filename>*.H</filename>
+ <filename>*.cp</filename>
+ <filename>*.CPP</filename>
+ <filename>*.tpp</filename>
+ <mime_type>text/x-c++hdr</mime_type>
+ <mime_type>text/x-c++src</mime_type>
+ <ensure_nl>true</ensure_nl>
+ <analyse first="true">
+ <regex pattern="#include <[a-z_]+>" score="0.2" />
+ <regex pattern="using namespace " score="0.4" />
+ </analyse>
+ </config>
+ <rules>
+ <state name="classname">
+ <rule pattern="(\[\[.+\]\])(\s*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s*(?=[>{])">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="^#if\s+0">
+ <token type="CommentPreproc"/>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^#">
+ <token type="CommentPreproc"/>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(\n|[\w\W]*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="macro">
+ <rule pattern="(include)(\s+)("[^"]+?"|<[^>]+?>)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="CommentPreprocFile"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^/\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="/[*](.|\n)*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(?<=\\)\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="statements">
+ <rule pattern="(reinterpret_cast|static_assert|thread_local|dynamic_cast|static_cast|const_cast|co_return|protected|namespace|consteval|constexpr|typename|co_await|co_yield|operator|restrict|explicit|template|override|noexcept|requires|decltype|alignof|private|alignas|virtual|mutable|nullptr|concept|export|friend|typeid|throws|public|delete|final|throw|catch|using|this|new|try)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(enum)\b(\s+)(class)\b(\s*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="(class|struct|enum|union)\b(\s*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="\[\[.+\]\]">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(R)(")([^\\()\s]{,16})(\()((?:.|\n)*?)(\)\3)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ <token type="LiteralStringDelimiter"/>
+ <token type="LiteralStringDelimiter"/>
+ <token type="LiteralString"/>
+ <token type="LiteralStringDelimiter"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(u8|u|U)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(L?)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[xX]([0-9A-Fa-f]('?[0-9A-Fa-f]+)*)[LlUu]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0('?[0-7]+)+[LlUu]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[Bb][01]('?[01]+)*[LlUu]*">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="[0-9]('?[0-9]+)*[LlUu]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[()\[\],.]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(restricted|volatile|continue|register|default|typedef|struct|extern|switch|sizeof|static|return|union|while|const|break|goto|enum|else|case|auto|for|asm|if|do)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(bool|int|long|float|short|double|char((8|16|32)_t)?|wchar_t|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(typename|__inline|restrict|_inline|thread|inline|naked)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(__m(128i|128d|128|64))\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="__(forceinline|identifier|unaligned|declspec|fastcall|stdcall|finally|except|assume|int32|cdecl|int64|based|leave|int16|raise|noop|int8|w64|try|asm)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false|NULL)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(:)(?!:)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="function">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="if0">
+ <rule pattern="^\s*#if.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <push/>
+ </rule>
+ <rule pattern="^\s*#el(?:se|if).*\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="^\s*#endif.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".*?\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="function"/>
+ </rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <push state="statement"/>
+ </rule>
+ <rule pattern="__(multiple_inheritance|virtual_inheritance|single_inheritance|interface|uuidof|super|event)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="__(offload|blockingoffload|outer)\b">
+ <token type="KeywordPseudo"/>
+ </rule>
+ </state>
+ <state name="statement">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern="[{]">
+ <token type="Punctuation"/>
+ <push state="root"/>
+ </rule>
+ <rule pattern="[;}]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,260 @@
+<lexer>
+ <config>
+ <name>C</name>
+ <alias>c</alias>
+ <filename>*.c</filename>
+ <filename>*.h</filename>
+ <filename>*.idc</filename>
+ <filename>*.x[bp]m</filename>
+ <mime_type>text/x-chdr</mime_type>
+ <mime_type>text/x-csrc</mime_type>
+ <mime_type>image/x-xbitmap</mime_type>
+ <mime_type>image/x-xpixmap</mime_type>
+ <ensure_nl>true</ensure_nl>
+ <analyse first="true" >
+ <regex pattern="(?m)^\s*#include <" score="0.1" />
+ <regex pattern="(?m)^\s*#ifn?def " score="0.1" />
+ </analyse>
+ </config>
+ <rules>
+ <state name="statement">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="function">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="macro">
+ <rule pattern="(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="CommentPreprocFile"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^/\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="/[*](.|\n)*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(?<=\\)\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="if0">
+ <rule pattern="^\s*#if.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <push/>
+ </rule>
+ <rule pattern="^\s*#el(?:se|if).*\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="^\s*#endif.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".*?\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="^#if\s+0">
+ <token type="CommentPreproc"/>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^#">
+ <token type="CommentPreproc"/>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(\n|[\w\W]*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="statements">
+ <rule pattern="(L?)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+[LlUu]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[0-7]+[LlUu]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="\d+[LlUu]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[()\[\],.]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(restricted|volatile|continue|register|default|typedef|struct|extern|switch|sizeof|static|return|union|while|const|break|goto|enum|else|case|auto|for|asm|if|do)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(bool|int|long|float|short|double|char((8|16|32)_t)?|unsigned|signed|void|u?int(_fast|_least|)(8|16|32|64)_t)\b|\b[a-z]\w*_t\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(typename|__inline|restrict|_inline|thread|inline|naked)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(__m(128i|128d|128|64))\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="__(forceinline|identifier|unaligned|declspec|fastcall|finally|stdcall|wchar_t|assume|except|int32|cdecl|int16|leave|based|raise|int64|noop|int8|w64|try|asm)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false|NULL)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(:)(?!:)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b[A-Za-z_]\w*(?=\s*\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="function"/>
+ </rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <push state="statement"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,122 @@
+<lexer>
+ <config>
+ <name>Cap'n Proto</name>
+ <alias>capnp</alias>
+ <filename>*.capnp</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="#.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="@[0-9a-zA-Z]*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="=">
+ <token type="Literal"/>
+ <push state="expression"/>
+ </rule>
+ <rule pattern=":">
+ <token type="NameClass"/>
+ <push state="type"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="NameAttribute"/>
+ <push state="annotation"/>
+ </rule>
+ <rule pattern="(struct|enum|interface|union|import|using|const|annotation|extends|in|of|on|as|with|from|fixed)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="[\w.]+">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[^#@=:$\w]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="type">
+ <rule pattern="[^][=;,(){}$]+">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="[[(]">
+ <token type="NameClass"/>
+ <push state="parentype"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="parentype">
+ <rule pattern="[^][;()]+">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="[[(]">
+ <token type="NameClass"/>
+ <push/>
+ </rule>
+ <rule pattern="[])]">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="expression">
+ <rule pattern="[^][;,(){}$]+">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="[[(]">
+ <token type="Literal"/>
+ <push state="parenexp"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="parenexp">
+ <rule pattern="[^][;()]+">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="[[(]">
+ <token type="Literal"/>
+ <push/>
+ </rule>
+ <rule pattern="[])]">
+ <token type="Literal"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="annotation">
+ <rule pattern="[^][;,(){}=:]+">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="[[(]">
+ <token type="NameAttribute"/>
+ <push state="annexp"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="annexp">
+ <rule pattern="[^][;()]+">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="[[(]">
+ <token type="NameAttribute"/>
+ <push/>
+ </rule>
+ <rule pattern="[])]">
+ <token type="NameAttribute"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,137 @@
+<lexer>
+ <config>
+ <name>Cassandra CQL</name>
+ <alias>cassandra</alias>
+ <alias>cql</alias>
+ <filename>*.cql</filename>
+ <mime_type>text/x-cql</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern="[^']+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="''">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="quoted-ident">
+ <rule pattern="[^"]+">
+ <token type="LiteralStringName"/>
+ </rule>
+ <rule pattern="""">
+ <token type="LiteralStringName"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringName"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="dollar-string">
+ <rule pattern="[^\$]+">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule pattern="\$\$">
+ <token type="LiteralStringHeredoc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="(--|\/\/).*\n?">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comments"/>
+ </rule>
+ <rule pattern="(ascii|bigint|blob|boolean|counter|date|decimal|double|float|frozen|inet|int|list|map|set|smallint|text|time|timestamp|timeuuid|tinyint|tuple|uuid|varchar|varint)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(DURABLE_WRITES|LOCAL_QUORUM|MATERIALIZED|COLUMNFAMILY|REPLICATION|NORECURSIVE|NOSUPERUSER|PERMISSIONS|EACH_QUORUM|CONSISTENCY|PERMISSION|CLUSTERING|WRITETIME|SUPERUSER|KEYSPACES|AUTHORIZE|LOCAL_ONE|AGGREGATE|FINALFUNC|PARTITION|FILTERING|UNLOGGED|CONTAINS|DISTINCT|FUNCTION|LANGUAGE|INFINITY|INITCOND|TRUNCATE|KEYSPACE|PASSWORD|REPLACE|OPTIONS|TRIGGER|STORAGE|ENTRIES|RETURNS|COMPACT|PRIMARY|EXISTS|STATIC|PAGING|UPDATE|CUSTOM|VALUES|INSERT|DELETE|MODIFY|CREATE|SELECT|SCHEMA|LOGGED|REVOKE|RENAME|QUORUM|CALLED|STYPE|ORDER|ALTER|BATCH|BEGIN|COUNT|ROLES|APPLY|WHERE|SFUNC|LEVEL|INPUT|LOGIN|INDEX|TABLE|THREE|ALLOW|TOKEN|LIMIT|USING|USERS|GRANT|FROM|KEYS|JSON|USER|INTO|ROLE|TYPE|VIEW|DESC|WITH|DROP|FULL|ASC|TTL|OFF|PER|KEY|USE|ADD|NAN|ONE|ALL|ANY|TWO|AND|NOT|AS|IN|IF|OF|IS|ON|TO|BY|OR)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="[+*/<>=~!@#%^&|`?-]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)">
+ <usingbygroup>
+ <sublexer_name_group>1</sublexer_name_group>
+ <code_group>6</code_group>
+ <emitters>
+ <token type="NameBuiltin"/>
+ <token type="TextWhitespace"/>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="LiteralStringHeredoc"/>
+ <token type="LiteralStringHeredoc"/>
+ <token type="LiteralStringHeredoc"/>
+ </emitters>
+ </usingbygroup>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="0x[0-9a-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\.[0-9]+(e[+-]?[0-9]+)?">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="-?[0-9]+(\.[0-9])?(e[+-]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringName"/>
+ <push state="quoted-ident"/>
+ </rule>
+ <rule pattern="\$\$">
+ <token type="LiteralStringHeredoc"/>
+ <push state="dollar-string"/>
+ </rule>
+ <rule pattern="[a-z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern=":(['"]?)[a-z]\w*\b\1">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[;:()\[\]\{\},.]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="multiline-comments">
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comments"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^/*]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="[/*]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,151 @@
+<lexer>
+ <config>
+ <name>Ceylon</name>
+ <alias>ceylon</alias>
+ <filename>*.ceylon</filename>
+ <mime_type>text/x-ceylon</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="class">
+ <rule pattern="[A-Za-z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="[a-z][\w.]*">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="(shared|abstract|formal|default|actual|variable|deprecated|small|late|literal|doc|by|see|throws|optional|license|tagged|final|native|annotation|sealed)\b">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="(break|case|catch|continue|else|finally|for|in|if|return|switch|this|throw|try|while|is|exists|dynamic|nonempty|then|outer|assert|let)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(abstracts|extends|satisfies|super|given|of|out|assign)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(function|value|void|new)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(assembly|module|package)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(class|interface|object|alias)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="(import)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="".*``.*``.*"">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="(\.)([a-z_]\w*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameAttribute"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[~^*!%&\[\](){}<>|+=:;,./?-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="#[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\$([01]{4})(_[01]{4})+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="\$[01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="\d{1,3}(_\d{3})+[kMGTP]?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[0-9]+[kMGTP]?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,197 @@
+<lexer>
+ <config>
+ <name>CFEngine3</name>
+ <alias>cfengine3</alias>
+ <alias>cf3</alias>
+ <filename>*.cf</filename>
+ </config>
+ <rules>
+ <state name="interpol">
+ <rule pattern="\$[{(]">
+ <token type="LiteralStringInterpol"/>
+ <push/>
+ </rule>
+ <rule pattern="[})]">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^${()}]+">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ </state>
+ <state name="arglist">
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="#.*?\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="^@.*?\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(body)(\s+)(\S+)(\s+)(control)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(body|bundle|promise)(\s+)(\S+)(\s+)(\w+)(\()">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="arglist"/>
+ </rule>
+ <rule pattern="(body|bundle|promise)(\s+)(\S+)(\s+)(\w+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\S+)(\s*)(=>)(\s*)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([\w.!&|()"$]+)(::)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="doublequotestring"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="singlequotestring"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralString"/>
+ <push state="backtickstring"/>
+ </rule>
+ <rule pattern="(\w+)(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\w+)(:)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="@[{(][^)}]+[})]">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\$[(][^)]+[)]">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[(){},;]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="=>">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="->">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\d+\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="doublequotestring">
+ <rule pattern="\$[{(]">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpol"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="singlequotestring">
+ <rule pattern="\$[{(]">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpol"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="backtickstring">
+ <rule pattern="\$[{(]">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpol"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,92 @@
+<lexer>
+ <config>
+ <name>cfstatement</name>
+ <alias>cfs</alias>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*(?:.|\n)*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\+\+|--">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[-+*/^&=!]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="<=|>=|<|>|==">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="mod\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(eq|lt|gt|lte|gte|not|is|and|or)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\|\||&&">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="'.*?'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="(if|else|len|var|xml|default|break|switch|component|property|function|do|try|catch|in|continue|for|return|while|required|any|array|binary|boolean|component|date|guid|numeric|query|string|struct|uuid|case)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(application|session|client|cookie|super|this|variables|arguments)\b">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="([a-z_$][\w.]*)(\s*)(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-z_$][\w.]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[()\[\]{};:,.\\]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="""">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="#.+?#">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="[^"#]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="#">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,134 @@
+<lexer>
+ <config>
+ <name>ChaiScript</name>
+ <alias>chai</alias>
+ <alias>chaiscript</alias>
+ <filename>*.chai</filename>
+ <mime_type>text/x-chaiscript</mime_type>
+ <mime_type>application/x-chaiscript</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="dqstring">
+ <rule pattern="\$\{[^"}]+?\}">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\\"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[^\\"$]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="^\#.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="slashstartsregex">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=/)">
+ <token type="Text"/>
+ <push state="#pop" state="badregex"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="badregex">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?">
+ <token type="Operator"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[=+\-*/]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(for|in|while|do|break|return|continue|if|else|throw|try|catch)\b">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(var)\b">
+ <token type="KeywordDeclaration"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(attr|def|fun)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(eval|throw)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="`\S+`">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[$a-zA-Z_]\w*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="dqstring"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,143 @@
+<lexer>
+ <config>
+ <name>Chapel</name>
+ <alias>chapel</alias>
+ <alias>chpl</alias>
+ <filename>*.chpl</filename>
+ </config>
+ <rules>
+ <state name="procname">
+ <rule pattern="([a-zA-Z_][.\w$]*|\~[a-zA-Z_][.\w$]*|[+*/!~%<>=&^|\-:]{1,2})">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="receivertype"/>
+ </rule>
+ <rule pattern="\)+\.">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="receivertype">
+ <rule pattern="(unmanaged|borrowed|atomic|single|shared|owned|sync)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(complex|nothing|opaque|string|locale|bytes|range|imag|real|bool|uint|void|int)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="[^()]*">
+ <token type="NameOther"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(.*?)\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="(config|const|inout|param|type|out|ref|var|in)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(false|none|true|nil)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(complex|nothing|opaque|string|locale|bytes|range|imag|real|bool|uint|void|int)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(implements|forwarding|prototype|otherwise|subdomain|primitive|unmanaged|override|borrowed|lifetime|coforall|continue|private|require|dmapped|cobegin|foreach|lambda|sparse|shared|domain|pragma|reduce|except|export|extern|throws|forall|delete|return|noinit|single|import|select|public|inline|serial|atomic|defer|break|local|index|throw|catch|label|begin|where|while|align|yield|owned|only|this|sync|with|scan|else|enum|init|when|then|let|for|try|use|new|zip|if|by|as|on|do)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(iter)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ <push state="procname"/>
+ </rule>
+ <rule pattern="(proc)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ <push state="procname"/>
+ </rule>
+ <rule pattern="(operator)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ <push state="procname"/>
+ </rule>
+ <rule pattern="(class|interface|module|record|union)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+\.\d*([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\.\d+([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+[Ee][-+]\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="(\d*\.\d+)([eE][+-]?[0-9]+)?i?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+[eE][+-]?[0-9]+i?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[bB][01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[oO][0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|<=>|<~>|\.\.|by|#|\.\.\.|&&|\|\||!|&|\||\^|~|<<|>>|==|!=|<=|>=|<|>|[+\-*/%]|\*\*)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[:;,.?()\[\]{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[a-zA-Z_][\w$]*">
+ <token type="NameOther"/>
+ </rule>
+ </state>
+ <state name="classname">
+ <rule pattern="[a-zA-Z_][\w$]*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,55 @@
+<lexer>
+ <config>
+ <name>Cheetah</name>
+ <alias>cheetah</alias>
+ <alias>spitfire</alias>
+ <filename>*.tmpl</filename>
+ <filename>*.spt</filename>
+ <mime_type>application/x-cheetah</mime_type>
+ <mime_type>application/x-spitfire</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(##[^\n]*)$">
+ <bygroups>
+ <token type="Comment"/>
+ </bygroups>
+ </rule>
+ <rule pattern="#[*](.|\n)*?[*]#">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="#end[^#\n]*(?:#|$)">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="#slurp$">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(#[a-zA-Z]+)([^#\n]*)(#|$)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <using lexer="Python"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\$)([a-zA-Z_][\w.]*\w)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <using lexer="Python"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\$\{!?)(.*?)(\})(?s)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <using lexer="Python"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?sx)
 (.+?) # anything, followed by:
 (?:
 (?=\#[#a-zA-Z]*) | # an eval comment
 (?=\$[a-zA-Z_{]) | # a substitution
 \Z # end of string
 )
 ">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,71 @@
+<lexer>
+ <config>
+ <name>Clojure</name>
+ <alias>clojure</alias>
+ <alias>clj</alias>
+ <alias>edn</alias>
+ <filename>*.clj</filename>
+ <filename>*.edn</filename>
+ <mime_type>text/x-clojure</mime_type>
+ <mime_type>application/x-clojure</mime_type>
+ <mime_type>application/edn</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern=";.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="[,\s]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="-?\d+\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="0x-?[abcdef\d]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'(?!#)[\w!$%*+<=>?/.#-]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="\\(.|[a-z]+)">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="::?#?(?!#)[\w!$%*+<=>?/.#-]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="~@|[`\'#^~&@]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(quote|loop|new|var|let|def|if|do|fn|\.) ">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(definterface|defprotocol|defproject|defstruct|definline|defmethod|defrecord|defmulti|defmacro|defonce|declare|deftype|defn-|def-|defn|ns) ">
+ <token type="KeywordDeclaration"/>
+ </rule>
@@ -0,0 +1,90 @@
+<lexer>
+ <config>
+ <name>CMake</name>
+ <alias>cmake</alias>
+ <filename>*.cmake</filename>
+ <filename>CMakeLists.txt</filename>
+ <mime_type>text/x-cmake</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\b(\w+)([ \t]*)(\()">
+ <bygroups>
+ <token type="NameBuiltin"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="args"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="ws"/>
+ </rule>
+ </state>
+ <state name="args">
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(\$\{)(.+?)(\})">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameVariable"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\$ENV\{)(.+?)(\})">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameVariable"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\$<)(.+?)(>)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameVariable"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?s)".*?"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\\\S+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[^)$"# \t\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="ws"/>
+ </rule>
+ </state>
+ <state name="string"/>
+ <state name="keywords">
+ <rule pattern="\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|MSVC70|MSVC71|MSVC80|MSVC90)\b">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="ws">
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#.*\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,90 @@
+<lexer>
+ <config>
+ <name>COBOL</name>
+ <alias>cobol</alias>
+ <filename>*.cob</filename>
+ <filename>*.COB</filename>
+ <filename>*.cpy</filename>
+ <filename>*.CPY</filename>
+ <mime_type>text/x-cobol</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="strings">
+ <rule pattern=""[^"\n]*("|\n)">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'[^'\n]*('|\n)">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="nums">
+ <rule pattern="\d+(\s*|\.$|$)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[+-]?\d*\.\d+(E[-+]?\d+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[+-]?\d+\.\d*(E[-+]?\d+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="comment"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule>
+ <include state="core"/>
+ </rule>
+ <rule>
+ <include state="nums"/>
+ </rule>
+ <rule pattern="[a-z0-9]([\w\-]*[a-z0-9]+)?">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="(^.{6}[*/].*\n|^.{6}|\*>.*\n)">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="core">
+ <rule pattern="(^|(?<=[^\w\-]))(ALL\s+)?((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)\s*($|(?=[^\w\-]))">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="(^|(?<=[^\w\-]))(WORKING-STORAGE|IDENTIFICATION|LOCAL-STORAGE|CONFIGURATION|END-EVALUATE|FILE-CONTROL|END-UNSTRING|END-SUBTRACT|END-MULTIPLY|INPUT-OUTPUT|END-PERFORM|END-DISPLAY|END-OF-PAGE|END-COMPUTE|ENVIRONMENT|I-O-CONTROL|END-REWRITE|END-RETURN|INITIALIZE|END-ACCEPT|END-DIVIDE|PROGRAM-ID|END-STRING|END-DELETE|END-SEARCH|END-WRITE|PROCEDURE|END-START|TERMINATE|END-READ|MULTIPLY|CONTINUE|SUPPRESS|SUBTRACT|INITIATE|UNSTRING|DIVISION|VALIDATE|END-CALL|ALLOCATE|GENERATE|EVALUATE|PERFORM|FOREVER|LINKAGE|END-ADD|REWRITE|INSPECT|SECTION|RELEASE|COMPUTE|DISPLAY|END-IF|GOBACK|INVOKE|CANCEL|UNLOCK|SCREEN|SEARCH|DELETE|STRING|DIVIDE|ACCEPT|RETURN|RESUME|START|RAISE|MERGE|CLOSE|WRITE|FILE|STOP|FREE|READ|ELSE|THEN|SORT|EXIT|OPEN|CALL|MOVE|DATA|END|SET|ADD|USE|GO|FD|SD|IF)\s*($|(?=[^\w\-]))">
+ <token type="KeywordReserved"/>
+ </rule>
@@ -0,0 +1,210 @@
+<lexer>
+ <config>
+ <name>CoffeeScript</name>
+ <alias>coffee-script</alias>
+ <alias>coffeescript</alias>
+ <alias>coffee</alias>
+ <filename>*.coffee</filename>
+ <mime_type>text/coffeescript</mime_type>
+ <dot_all>true</dot_all>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="###[^#].*?###">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="#(?!##[^#]).*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="multilineregex">
+ <rule pattern="[^/#]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="///([gim]+\b|\B)">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpoling_string"/>
+ </rule>
+ <rule pattern="[/#]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="slashstartsregex">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="///">
+ <token type="LiteralStringRegex"/>
+ <push state="#pop" state="multilineregex"/>
+ </rule>
+ <rule pattern="/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/">
+ <token type="Operator"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="tsqs">
+ <rule pattern="'''">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="#|\\.|\'|"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ </state>
+ <state name="dqs">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\.|\'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpoling_string"/>
+ </rule>
+ <rule pattern="#">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ </state>
+ <state name="sqs">
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="#|\\.|"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ </state>
+ <state name="tdqs">
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\.|\'|"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpoling_string"/>
+ </rule>
+ <rule pattern="#">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="^(?=\s|/)">
+ <token type="Text"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?(?!>)|!=?|=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?">
+ <token type="Operator"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(?:\([^()]*\))?\s*[=-]>">
+ <token type="NameFunction"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(?<![.$])(for|own|in|of|while|until|loop|break|return|continue|switch|when|then|if|unless|else|throw|try|catch|finally|new|delete|typeof|instanceof|super|extends|this|class|by)\b">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(?<![.$])(true|false|yes|no|on|off|null|NaN|Infinity|undefined)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[$a-zA-Z_][\w.:$]*\s*[:=]\s">
+ <token type="NameVariable"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="@[$a-zA-Z_][\w.:$]*\s*[:=]\s">
+ <token type="NameVariableInstance"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="@">
+ <token type="NameOther"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="@?[$a-zA-Z_][\w$]*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <push state="tdqs"/>
+ </rule>
+ <rule pattern="'''">
+ <token type="LiteralString"/>
+ <push state="tsqs"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="dqs"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="sqs"/>
+ </rule>
+ </state>
+ <state name="interpoling_string">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="strings">
+ <rule pattern="[^#\\\'"]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,184 @@
+<lexer>
+ <config>
+ <name>Common Lisp</name>
+ <alias>common-lisp</alias>
+ <alias>cl</alias>
+ <alias>lisp</alias>
+ <filename>*.cl</filename>
+ <filename>*.lisp</filename>
+ <mime_type>text/x-common-lisp</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="body">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="#\|">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comment"/>
+ </rule>
+ <rule pattern="#\d*Y.*$">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern=""(\\.|\\\n|[^"\\])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=":(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="::(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern=":#(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="'(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="'">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="`">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[-+]?\d+\.?(?=[ "()\'\n,;`])">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[-+]?\d+/\d+(?=[ "()\'\n,;`])">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)(?=[ "()\'\n,;`])">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="#\\.(?=[ "()\'\n,;`])">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="#\\(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="#\(">
+ <token type="Operator"/>
+ <push state="body"/>
+ </rule>
+ <rule pattern="#\d*\*[01]*">
+ <token type="LiteralOther"/>
+ </rule>
+ <rule pattern="#:(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="#[.,]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="#\'">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="#b[+-]?[01]+(/[01]+)?">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="#o[+-]?[0-7]+(/[0-7]+)?">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="#x[+-]?[0-9a-f]+(/[0-9a-f]+)?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="#\d+r[+-]?[0-9a-z]+(/[0-9a-z]+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="(#c)(\()">
+ <bygroups>
+ <token type="LiteralNumber"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="body"/>
+ </rule>
+ <rule pattern="(#\d+a)(\()">
+ <bygroups>
+ <token type="LiteralOther"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="body"/>
+ </rule>
+ <rule pattern="(#s)(\()">
+ <bygroups>
+ <token type="LiteralOther"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="body"/>
+ </rule>
+ <rule pattern="#p?"(\\.|[^"])*"">
+ <token type="LiteralOther"/>
+ </rule>
+ <rule pattern="#\d+=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="#\d+#">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="#+nil(?=[ "()\'\n,;`])\s*\(">
+ <token type="CommentPreproc"/>
+ <push state="commented-form"/>
+ </rule>
+ <rule pattern="#[+-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(,@|,|\.)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(t|nil)(?=[ "()\'\n,;`])">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="\*(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)\*">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="(\|[^|]+\||(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~])(?:\\.|[\w!$%&*+-/<=>?@\[\]^{}~]|[#.:])*)">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="body"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <push state="body"/>
+ </rule>
+ </state>
+ <state name="multiline-comment">
+ <rule pattern="#\|">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\|#">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^|#]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="[|#]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="commented-form">
+ <rule pattern="\(">
+ <token type="CommentPreproc"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^()]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,136 @@
+<lexer>
+ <config>
+ <name>Coq</name>
+ <alias>coq</alias>
+ <filename>*.v</filename>
+ <mime_type>text/x-coq</mime_type>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern="[^"]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="""">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="dotted">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[A-Z][\w\']*(?=\s*\.)">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="[A-Z][\w\']*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[a-z][a-z0-9_\']*">
+ <token type="Name"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="false|true|\(\)|\[\]">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="\(\*">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="\b(Projections|Monomorphic|Polymorphic|Proposition|CoInductive|Hypothesis|CoFixpoint|Contextual|Definition|Parameters|Hypotheses|Structure|Inductive|Corollary|Implicits|Parameter|Variables|Arguments|Canonical|Printing|Coercion|Reserved|Universe|Notation|Instance|Fixpoint|Variable|Morphism|Relation|Existing|Implicit|Example|Theorem|Delimit|Defined|Rewrite|outside|Require|Resolve|Section|Context|Prenex|Strict|Module|Import|Export|Global|inside|Remark|Tactic|Search|Record|Scope|Unset|Check|Local|Close|Class|Graph|Proof|Lemma|Print|Axiom|Show|Goal|Open|Fact|Hint|Bind|Ltac|Save|View|Let|Set|All|End|Qed)\b">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern="\b(exists2|nosimpl|struct|exists|return|forall|match|cofix|then|with|else|for|fix|let|fun|end|is|of|if|in|as)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(Type|Prop)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\b(native_compute|setoid_rewrite|etransitivity|econstructor|transitivity|autorewrite|constructor|cutrewrite|vm_compute|bool_congr|generalize|inversion|induction|injection|nat_congr|intuition|destruct|suffices|erewrite|symmetry|nat_norm|replace|rewrite|compute|pattern|trivial|without|assert|unfold|change|eapply|intros|unlock|revert|rename|refine|eauto|tauto|after|right|congr|split|field|simpl|intro|clear|apply|using|subst|case|left|suff|loss|wlog|have|fold|ring|move|lazy|elim|pose|auto|red|cbv|hnf|cut|set)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(contradiction|discriminate|reflexivity|assumption|congruence|romega|omega|exact|solve|tauto|done|by)\b">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="\b(repeat|first|idtac|last|try|do)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="\b([A-Z][\w\']*)">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="(λ|Π|\|\}|\{\||\\/|/\\|=>|~|\}|\|]|\||\{<|\{|`|_|]|\[\||\[>|\[<|\[|\?\?|\?|>\}|>]|>|=|<->|<-|<|;;|;|:>|:=|::|:|\.\.|\.|->|-\.|-|,|\+|\*|\)|\(|&&|&|#|!=)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b(unit|nat|bool|string|ascii|list)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="[^\W\d][\w']*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\d[\d_]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="0[xX][\da-fA-F][\da-fA-F_]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[oO][0-7][0-7_]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[bB][01][01_]*">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="'.'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="'">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="[~?][a-z][\w\']*:">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^(*)]+">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\(\*">
+ <token type="Comment"/>
+ <push/>
+ </rule>
+ <rule pattern="\*\)">
+ <token type="Comment"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[(*)]">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,762 @@
+<lexer>
+ <config>
+ <name>Crystal</name>
+ <alias>cr</alias>
+ <alias>crystal</alias>
+ <filename>*.cr</filename>
+ <mime_type>text/x-crystal</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="pa-intp-string">
+ <rule pattern="\\[\(]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[\\#()]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#()]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="ab-regex">
+ <rule pattern="\\[\\<>]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="<">
+ <token type="LiteralStringRegex"/>
+ <push/>
+ </rule>
+ <rule pattern=">[imsx]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#<>]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#<>]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="cb-regex">
+ <rule pattern="\\[\\{}]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="LiteralStringRegex"/>
+ <push/>
+ </rule>
+ <rule pattern="\}[imsx]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#{}]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#{}]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="simple-backtick">
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[^\\`#]+">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string-intp">
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="in-intp"/>
+ </rule>
+ </state>
+ <state name="interpolated-regex">
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="cb-string">
+ <rule pattern="\\[\\{}]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\\#{}]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#{}]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="in-macro-control">
+ <rule pattern="\{%">
+ <token type="LiteralStringInterpol"/>
+ <push/>
+ </rule>
+ <rule pattern="%\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="for\b|in\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="interpolated-string">
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="in-macro-expr">
+ <rule pattern="\{\{">
+ <token type="LiteralStringInterpol"/>
+ <push/>
+ </rule>
+ <rule pattern="\}\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="simple-string">
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[^\\"#]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="cb-intp-string">
+ <rule pattern="\\[\{]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[\\#{}]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#{}]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="string-intp-escaped">
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule>
+ <include state="string-escaped"/>
+ </rule>
+ </state>
+ <state name="sb-regex">
+ <rule pattern="\\[\\\[\]]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="LiteralStringRegex"/>
+ <push/>
+ </rule>
+ <rule pattern="\][imsx]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#\[\]]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#\[\]]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="classname">
+ <rule pattern="[A-Z_]\w*">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(\()(\s*)([A-Z_]\w*)(\s*)(\))">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string-escaped">
+ <rule pattern="\\([\\befnstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ <state name="sb-intp-string">
+ <rule pattern="\\[\[]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\]">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[\\#\[\]]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#\[\]]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="pa-regex">
+ <rule pattern="\\[\\()]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="LiteralStringRegex"/>
+ <push/>
+ </rule>
+ <rule pattern="\)[imsx]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#()]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#()]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="in-attr">
+ <rule pattern="\[">
+ <token type="Operator"/>
+ <push/>
+ </rule>
+ <rule pattern="\]">
+ <token type="Operator"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="ab-intp-string">
+ <rule pattern="\\[<]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="<">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern=">">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[\\#<>]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#<>]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="in-intp">
+ <rule pattern="\{">
+ <token type="LiteralStringInterpol"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="end-part">
+ <rule pattern=".+">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="#.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(instance_sizeof|pointerof|protected|abstract|require|private|include|unless|typeof|sizeof|return|extend|ensure|rescue|ifdef|super|break|begin|until|while|elsif|yield|next|when|else|then|case|with|end|asm|if|do|as|of)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(false|true|nil)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(module|lib)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(def|fun|macro)(\s+)((?:[a-zA-Z_]\w*::)*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="def(?=[*%&^`~+\-/\[<>=])">
+ <token type="Keyword"/>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="(class|struct|union|type|alias|enum)(\s+)((?:[a-zA-Z_]\w*::)*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="(self|out|uninitialized)\b|(is_a|responds_to)\?">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="(def_equals_and_hash|assert_responds_to|forward_missing_to|def_equals|property|def_hash|parallel|delegate|debugger|getter|record|setter|spawn|pp)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="getter[!?]|property[!?]|__(DIR|FILE|LINE)__\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="(?<!\.)(get_stack_top|StaticArray|Concurrent|with_color|Reference|Scheduler|read_line|Exception|at_exit|Pointer|Channel|Float64|sprintf|Float32|Process|Object|Struct|caller|UInt16|UInt32|UInt64|system|future|Number|printf|String|Symbol|Int32|Range|Slice|Regex|Mutex|sleep|Array|Class|raise|Tuple|Deque|delay|Float|Int16|print|abort|Value|UInt8|Int64|puts|Proc|File|Void|exit|fork|Bool|Char|gets|lazy|loop|main|rand|Enum|Int8|Time|Hash|Set|Box|Nil|Dir|Int|p)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule pattern="(<<-?)("|\')()(\2)(.*?\n)">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule pattern="__END__">
+ <token type="CommentPreproc"/>
+ <push state="end-part"/>
+ </rule>
+ <rule pattern="(?:^|(?<=[=<>~!:])|(?<=(?:\s|;)when\s)|(?<=(?:\s|;)or\s)|(?<=(?:\s|;)and\s)|(?<=\.index\s)|(?<=\.scan\s)|(?<=\.sub\s)|(?<=\.sub!\s)|(?<=\.gsub\s)|(?<=\.gsub!\s)|(?<=\.match\s)|(?<=(?:\s|;)if\s)|(?<=(?:\s|;)elsif\s)|(?<=^when\s)|(?<=^index\s)|(?<=^scan\s)|(?<=^sub\s)|(?<=^gsub\s)|(?<=^sub!\s)|(?<=^gsub!\s)|(?<=^match\s)|(?<=^if\s)|(?<=^elsif\s))(\s*)(/)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringRegex"/>
+ </bygroups>
+ <push state="multiline-regex"/>
+ </rule>
+ <rule pattern="(?<=\(|,|\[)/">
+ <token type="LiteralStringRegex"/>
+ <push state="multiline-regex"/>
+ </rule>
+ <rule pattern="(\s+)(/)(?![\s=])">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringRegex"/>
+ </bygroups>
+ <push state="multiline-regex"/>
+ </rule>
+ <rule pattern="(0o[0-7]+(?:_[0-7]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberOct"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberHex"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(0b[01]+(?:_[01]+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberBin"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)(?:e[+-]?[0-9]+)?(?:_?f[0-9]+)?)(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberFloat"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)(?:_?f[0-9]+)?)(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberFloat"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((?:0(?![0-9])|[1-9][\d_]*)(?:\.\d[\d_]*)?(?:e[+-]?[0-9]+)?(?:_?f[0-9]+))(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberFloat"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(0\b|[1-9][\d]*(?:_\d+)*(?:_?[iu][0-9]+)?)\b(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberInteger"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="@@[a-zA-Z_]\w*">
+ <token type="NameVariableClass"/>
+ </rule>
+ <rule pattern="@[a-zA-Z_]\w*">
+ <token type="NameVariableInstance"/>
+ </rule>
+ <rule pattern="\$\w+">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="\$[!@&`\'+~=/\\,;.<>_*$?:"^-]">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="\$-[0adFiIlpvw]">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="::">
+ <token type="Operator"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule pattern="\?(\\[MC]-)*(\\([\\befnrtv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)(?!\w)">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="[A-Z][A-Z_]+\b">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="\{%">
+ <token type="LiteralStringInterpol"/>
+ <push state="in-macro-control"/>
+ </rule>
+ <rule pattern="\{\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="in-macro-expr"/>
+ </rule>
+ <rule pattern="(@\[)(\s*)([A-Z]\w*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="NameDecorator"/>
+ </bygroups>
+ <push state="in-attr"/>
+ </rule>
+ <rule pattern="(\.|::)(\[\]\?|<=>|===|\[\]=|>>|&&|\*\*|\[\]|\|\||>=|=~|!~|<<|<=|!=|==|<|/|=|-|\+|>|\*|&|%|\^|!|\||~)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameOperator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Name"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*(?:[!?](?!=))?">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="(\[|\]\??|\*\*|<=>?|>=|<<?|>>?|=~|===|!~|&&?|\|\||\.{1,3})">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[-+/*%=<>&!^|~]=?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[(){};,/?:\\]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="multiline-regex">
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="\\/">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\/#]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="/[imsx]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="ab-string">
+ <rule pattern="\\[\\<>]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="<">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern=">">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\\#<>]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#<>]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="pa-string">
+ <rule pattern="\\[\\()]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\\#()]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#()]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="strings">
+ <rule pattern="\:@{0,2}[a-zA-Z_]\w*[!?]?">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="\:@{0,2}(\[\]\?|<=>|===|\[\]=|>>|&&|\*\*|\[\]|\|\||>=|=~|!~|<<|<=|!=|==|<|/|=|-|\+|>|\*|&|%|\^|!|\||~)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern=":'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^']|\\[^'\\]+)'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern=":"">
+ <token type="LiteralStringSymbol"/>
+ <push state="simple-sym"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(:)(?!:)">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="simple-string"/>
+ </rule>
+ <rule pattern="(?<!\.)`">
+ <token type="LiteralStringBacktick"/>
+ <push state="simple-backtick"/>
+ </rule>
+ <rule pattern="%\{">
+ <token type="LiteralStringOther"/>
+ <push state="cb-intp-string"/>
+ </rule>
+ <rule pattern="%[wi]\{">
+ <token type="LiteralStringOther"/>
+ <push state="cb-string"/>
+ </rule>
+ <rule pattern="%r\{">
+ <token type="LiteralStringRegex"/>
+ <push state="cb-regex"/>
+ </rule>
+ <rule pattern="%\[">
+ <token type="LiteralStringOther"/>
+ <push state="sb-intp-string"/>
+ </rule>
+ <rule pattern="%[wi]\[">
+ <token type="LiteralStringOther"/>
+ <push state="sb-string"/>
+ </rule>
+ <rule pattern="%r\[">
+ <token type="LiteralStringRegex"/>
+ <push state="sb-regex"/>
+ </rule>
+ <rule pattern="%\(">
+ <token type="LiteralStringOther"/>
+ <push state="pa-intp-string"/>
+ </rule>
+ <rule pattern="%[wi]\(">
+ <token type="LiteralStringOther"/>
+ <push state="pa-string"/>
+ </rule>
+ <rule pattern="%r\(">
+ <token type="LiteralStringRegex"/>
+ <push state="pa-regex"/>
+ </rule>
+ <rule pattern="%<">
+ <token type="LiteralStringOther"/>
+ <push state="ab-intp-string"/>
+ </rule>
+ <rule pattern="%[wi]<">
+ <token type="LiteralStringOther"/>
+ <push state="ab-string"/>
+ </rule>
+ <rule pattern="%r<">
+ <token type="LiteralStringRegex"/>
+ <push state="ab-regex"/>
+ </rule>
+ <rule pattern="(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[imsx]*)">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(%[wi]([\W_]))((?:\\\2|(?!\2).)*)(\2)">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringOther"/>
+ <token type="None"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringOther"/>
+ <token type="None"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(%([\[{(<]))((?:\\\2|(?!\2).)*)(\2)">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="sb-string">
+ <rule pattern="\\[\\\[\]]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\]">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\\#\[\]]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#\[\]]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="funcname">
+ <rule pattern="(?:([a-zA-Z_]\w*)(\.))?([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Operator"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="simple-sym">
+ <rule>
+ <include state="string-escaped"/>
+ </rule>
+ <rule pattern="[^\\"#]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringSymbol"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,323 @@
+<lexer>
+ <config>
+ <name>CSS</name>
+ <alias>css</alias>
+ <filename>*.css</filename>
+ <mime_type>text/css</mime_type>
+ </config>
+ <rules>
+ <state name="numeric-end">
+ <rule pattern="(vmin|grad|vmax|turn|dppx|dpcm|kHz|dpi|rad|rem|deg|vw|vh|ch|px|mm|cm|in|pt|pc|Hz|ex|em|ms|q|s)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="%">
+ <token type="KeywordType"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="atrule">
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="atcontent"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="basics"/>
+ </rule>
+ </state>
+ <state name="atcontent">
+ <rule>
+ <include state="basics"/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="2"/>
+ </rule>
+ </state>
+ <state name="common-values">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(-webkit-|-khtml-|prince-|-atsc-|-moz-|-rim-|-wap-|-ms-|-xv-|mso-|-ah-|-hp-|-ro-|-tc-|-o-)">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule>
+ <include state="urls"/>
+ </rule>
+ <rule pattern="(attr|blackness|blend|blenda|blur|brightness|calc|circle|color-mod|contrast|counter|cubic-bezier|device-cmyk|drop-shadow|ellipse|gray|grayscale|hsl|hsla|hue|hue-rotate|hwb|image|inset|invert|lightness|linear-gradient|matrix|matrix3d|opacity|perspective|polygon|radial-gradient|rect|repeating-linear-gradient|repeating-radial-gradient|rgb|rgba|rotate|rotate3d|rotateX|rotateY|rotateZ|saturate|saturation|scale|scale3d|scaleX|scaleY|scaleZ|sepia|shade|skewX|skewY|steps|tint|toggle|translate|translate3d|translateX|translateY|translateZ|whiteness)(\()">
+ <bygroups>
+ <token type="NameBuiltin"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="function-start"/>
+ </rule>
+ <rule pattern="([a-zA-Z_][\w-]+)(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="function-start"/>
+ </rule>
@@ -0,0 +1,53 @@
+<!--
+Lexer for RFC-4180 compliant CSV subject to the following additions:
+- UTF-8 encoding is accepted (the RFC requires 7-bit ASCII)
+- The line terminator character can be LF or CRLF (the RFC allows CRLF only)
+
+Link to the RFC-4180 specification: https://tools.ietf.org/html/rfc4180
+
+Additions inspired by:
+https://github.com/frictionlessdata/datapackage/issues/204#issuecomment-193242077
+
+Future improvements:
+- Identify non-quoted numbers as LiteralNumber
+- Identify y as an error in "x"y. Currently it's identified as another string
+ literal.
+-->
+
+<lexer>
+ <config>
+ <name>CSV</name>
+ <alias>csv</alias>
+ <filename>*.csv</filename>
+ <mime_type>text/csv</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\r?\n">
+ <token type="Punctuation" />
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation" />
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble" />
+ <push state="escaped" />
+ </rule>
+ <rule pattern="[^\r\n,]+">
+ <token type="LiteralString" />
+ </rule>
+ </state>
+ <state name="escaped">
+ <rule pattern="""">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble" />
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^"]+">
+ <token type="LiteralStringDouble" />
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,85 @@
+<lexer>
+ <config>
+ <name>CUE</name>
+ <alias>cue</alias>
+ <filename>*.cue</filename>
+ <mime_type>text/x-cue</mime_type>
+ <dot_all>true</dot_all>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//[^\n\r]+">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(\+|&&|==|<|=|-|\|\||!=|>|:|\*|&|=~|<=|\?|\[|\]|,|/|\||!~|>=|!|_\|_|\.\.\.)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="#*"+">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'\n])*['\n]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="0[boxX][0-9a-fA-F][_0-9a-fA-F]*|(\.\d+|\d[_\d]*(\.\d*)?)([eE][+-]?\d+)?[KMGTP]?i?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[~!%^&*()+=|\[\]:;,.<>/?-]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(import|for|if|in|let|package)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(bool|float|int|string|uint|ulong|ushort)\b\??">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(true|false|null|_)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="[@#]?[_a-zA-Z$]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="\\#*\(">
+ <token type="LiteralStringInterpol"/>
+ <push state="string-intp"/>
+ </rule>
+ <rule pattern=""+#*">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="string-intp">
+ <rule pattern="\)">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,372 @@
+<lexer>
+ <config>
+ <name>Cython</name>
+ <alias>cython</alias>
+ <alias>pyx</alias>
+ <alias>pyrex</alias>
+ <filename>*.pyx</filename>
+ <filename>*.pxd</filename>
+ <filename>*.pxi</filename>
+ <mime_type>text/x-cython</mime_type>
+ <mime_type>application/x-cython</mime_type>
+ </config>
+ <rules>
+ <state name="funcname">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="^(\s*)("""(?:.|\n)*?""")">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringDoc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)('''(?:.|\n)*?''')">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringDoc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="[]{}:(),;[]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(in|is|and|or|not)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="(<)([a-zA-Z0-9.?]+)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="KeywordType"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="!=|==|<<|>>|[-~+/*%=<>&^|.?]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(from)(\d+)(<=)(\s+)(<)(\d+)(:)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="LiteralNumberInteger"/>
+ <token type="Operator"/>
+ <token type="Name"/>
+ <token type="Operator"/>
+ <token type="Name"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule pattern="(def|property)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="(cp?def)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="cdef"/>
+ </rule>
+ <rule pattern="(cdef)(:)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(class|struct)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="(from)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="fromimport"/>
+ </rule>
+ <rule pattern="(c?import)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule>
+ <include state="builtins"/>
+ </rule>
+ <rule>
+ <include state="backtick"/>
+ </rule>
+ <rule pattern="(?:[rR]|[uU][rR]|[rR][uU])"""">
+ <token type="LiteralString"/>
+ <push state="tdqs"/>
+ </rule>
+ <rule pattern="(?:[rR]|[uU][rR]|[rR][uU])'''">
+ <token type="LiteralString"/>
+ <push state="tsqs"/>
+ </rule>
+ <rule pattern="(?:[rR]|[uU][rR]|[rR][uU])"">
+ <token type="LiteralString"/>
+ <push state="dqs"/>
+ </rule>
+ <rule pattern="(?:[rR]|[uU][rR]|[rR][uU])'">
+ <token type="LiteralString"/>
+ <push state="sqs"/>
+ </rule>
+ <rule pattern="[uU]?"""">
+ <token type="LiteralString"/>
+ <combined state="stringescape" state="tdqs"/>
+ </rule>
+ <rule pattern="[uU]?'''">
+ <token type="LiteralString"/>
+ <combined state="stringescape" state="tsqs"/>
+ </rule>
+ <rule pattern="[uU]?"">
+ <token type="LiteralString"/>
+ <combined state="stringescape" state="dqs"/>
+ </rule>
+ <rule pattern="[uU]?'">
+ <token type="LiteralString"/>
+ <combined state="stringescape" state="sqs"/>
+ </rule>
+ <rule>
+ <include state="name"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ </state>
+ <state name="stringescape">
+ <rule pattern="\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ <state name="strings">
+ <rule pattern="%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="[^\\\'"%\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[\'"\\]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="%">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="backtick">
+ <rule pattern="`.*?`">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0\d+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[xX][a-fA-F0-9]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d+L">
+ <token type="LiteralNumberIntegerLong"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(continue|ctypedef|except\?|include|finally|global|return|lambda|assert|except|print|nogil|while|fused|yield|break|raise|exec|else|elif|pass|with|gil|for|try|del|by|as|if)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(DEF|IF|ELIF|ELSE)\b">
+ <token type="CommentPreproc"/>
+ </rule>
+ </state>
+ <state name="fromimport">
+ <rule pattern="(\s+)(c?import)\b">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[a-zA-Z_.][\w.]*">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="nl">
+ <rule pattern="\n">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="dqs">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\|\\"|\\\n">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ </state>
+ <state name="tsqs">
+ <rule pattern="'''">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule>
+ <include state="nl"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="(\s+)(as)(\s+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_][\w.]*">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="(\s*)(,)(\s*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="name">
+ <rule pattern="@\w+">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="cdef">
+ <rule pattern="(public|readonly|extern|api|inline)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(struct|enum|union|class)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(?=[(:#=]|$)">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(,)">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="from\b">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="as\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=["\'])">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="classname">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="sqs">
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\|\\'|\\\n">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ </state>
+ <state name="tdqs">
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule>
+ <include state="nl"/>
+ </rule>
+ </state>
+ <state name="builtins">
+ <rule pattern="(?<!\.)(staticmethod|classmethod|__import__|issubclass|isinstance|basestring|bytearray|raw_input|frozenset|enumerate|property|unsigned|reversed|callable|execfile|hasattr|compile|complex|delattr|setattr|unicode|globals|getattr|reload|divmod|xrange|unichr|filter|reduce|buffer|intern|coerce|sorted|locals|object|round|input|range|super|tuple|bytes|float|slice|apply|bool|long|exit|vars|file|next|type|iter|open|dict|repr|hash|list|eval|oct|map|zip|int|hex|set|sum|chr|cmp|any|str|pow|ord|dir|len|min|all|abs|max|bin|id)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|NULL)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="(?<!\.)(PendingDeprecationWarning|UnicodeTranslateError|NotImplementedError|FloatingPointError|DeprecationWarning|UnicodeDecodeError|UnicodeEncodeError|UnboundLocalError|KeyboardInterrupt|ZeroDivisionError|IndentationError|EnvironmentError|OverflowWarning|ArithmeticError|RuntimeWarning|UnicodeWarning|AttributeError|AssertionError|NotImplemented|ReferenceError|StopIteration|SyntaxWarning|OverflowError|GeneratorExit|FutureWarning|BaseException|ImportWarning|StandardError|RuntimeError|UnicodeError|LookupError|ImportError|SyntaxError|MemoryError|SystemError|UserWarning|SystemExit|ValueError|IndexError|NameError|TypeError|Exception|KeyError|EOFError|TabError|OSError|Warning|IOError)\b">
+ <token type="NameException"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,133 @@
+<lexer>
+ <config>
+ <name>D</name>
+ <alias>d</alias>
+ <filename>*.d</filename>
+ <filename>*.di</filename>
+ <mime_type>text/x-d</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\+.*?\+/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="(asm|assert|body|break|case|cast|catch|continue|default|debug|delete|do|else|finally|for|foreach|foreach_reverse|goto|if|in|invariant|is|macro|mixin|new|out|pragma|return|super|switch|this|throw|try|typeid|typeof|version|while|with)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="__(FILE|FILE_FULL_PATH|MODULE|LINE|FUNCTION|PRETTY_FUNCTION|DATE|EOF|TIME|TIMESTAMP|VENDOR|VERSION)__\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="__(traits|vector|parameters)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="@[\w.]*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="(abstract|auto|alias|align|const|delegate|deprecated|enum|export|extern|final|function|immutable|inout|lazy|nothrow|override|package|private|protected|public|pure|ref|scope|shared|static|synchronized|template|unittest|__gshared)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(void|bool|byte|ubyte|short|ushort|int|uint|long|ulong|cent|ucent|float|double|real|ifloat|idouble|ireal|cfloat|cdouble|creal|char|wchar|dchar)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(size_t|ptrdiff_t|noreturn|string|wstring|dstring|Object|Throwable|Exception|Error|imported)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(module)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(class|interface|struct|template|union)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="(import)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern="[qr]?"(\\\\|\\"|[^"])*"[cwd]?">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(`)([^`]*)(`)[cwd]?">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="(\.)((?:[^\W\d]|\$)[\w$]*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameAttribute"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^\s*([^\W\d]|\$)[\w$]*:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFL]?i?|[0-9][eE][+\-]?[0-9][0-9_]*[fFL]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFL]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFL]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[bB][01][01_]*[lL]?">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[0-7_]+[lL]?">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0|[1-9][0-9_]*[lL]?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="([~^*!%&\[\](){}<>|+=:;,./?-]|q{)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="([^\W\d]|\$)[\w$]*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="class">
+ <rule pattern="([^\W\d]|\$)[\w$]*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="[\w.]+\*?">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,213 @@
+<lexer>
+ <config>
+ <name>Dart</name>
+ <alias>dart</alias>
+ <filename>*.dart</filename>
+ <mime_type>text/x-dart</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="string_double_multiline">
+ <rule pattern=""""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^"$\\]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule>
+ <include state="string_common"/>
+ </rule>
+ <rule pattern="(\$|\")+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="class">
+ <rule pattern="[a-zA-Z_$]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="import_decl">
+ <rule>
+ <include state="string_literal"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\b(as|show|hide)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="[a-zA-Z_$]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\,">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\;">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string_single_multiline">
+ <rule pattern="'''">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\'$\\]+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule>
+ <include state="string_common"/>
+ </rule>
+ <rule pattern="(\$|\')+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="string_literal"/>
+ </rule>
+ <rule pattern="#!(.*?)$">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\b(import|export)\b">
+ <token type="Keyword"/>
+ <push state="import_decl"/>
+ </rule>
+ <rule pattern="\b(library|source|part of|part)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\b(class)\b(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="\b(assert|break|case|catch|continue|default|do|else|finally|for|if|in|is|new|return|super|switch|this|throw|try|while)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(abstract|async|await|const|extends|factory|final|get|implements|native|operator|required|set|static|sync|typedef|var|with|yield)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="\b(bool|double|dynamic|int|num|Object|String|void)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\b(false|null|true)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]|as\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[a-zA-Z_$]\w*:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="[a-zA-Z_$]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[(){}\[\],.;]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d+(\.\d*)?([eE][+-]?\d+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\.\d+([eE][+-]?\d+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="string_literal">
+ <rule pattern="r"""([\w\W]*?)"""">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="r'''([\w\W]*?)'''">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="r"(.*?)"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="r'(.*?)'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern=""""">
+ <token type="LiteralStringDouble"/>
+ <push state="string_double_multiline"/>
+ </rule>
+ <rule pattern="'''">
+ <token type="LiteralStringSingle"/>
+ <push state="string_single_multiline"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string_double"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <push state="string_single"/>
+ </rule>
+ </state>
+ <state name="string_common">
+ <rule pattern="\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z'\"$\\])">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="(\$)([a-zA-Z_]\w*)">
+ <bygroups>
+ <token type="LiteralStringInterpol"/>
+ <token type="Name"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\$\{)(.*?)(\})">
+ <bygroups>
+ <token type="LiteralStringInterpol"/>
+ <usingself state="root"/>
+ <token type="LiteralStringInterpol"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="string_double">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^"$\\\n]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule>
+ <include state="string_common"/>
+ </rule>
+ <rule pattern="\$+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="string_single">
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^'$\\\n]+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule>
+ <include state="string_common"/>
+ </rule>
+ <rule pattern="\$+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,39 @@
+<lexer>
+ <config>
+ <name>Dax</name>
+ <alias>dax</alias>
+ <filename>*.dax</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="--.*\n?"><token type="CommentSingle"/></rule>
+ <rule pattern="//.*\n?"><token type="CommentSingle"/></rule>
+ <rule pattern="/\*"><token type="CommentMultiline"/><push state="multiline-comments"/></rule>
@@ -0,0 +1,17 @@
+<lexer>
+ <config>
+ <name>Desktop file</name>
+ <alias>desktop</alias>
+ <alias>desktop_entry</alias>
+ <filename>*.desktop</filename>
+ <mime_type>application/x-desktop</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^[ \t]*\n"><token type="TextWhitespace"/></rule>
+ <rule pattern="^(#.*)(\n)"><bygroups><token type="CommentSingle"/><token type="TextWhitespace"/></bygroups></rule>
+ <rule pattern="(\[[^\]\n]+\])(\n)"><bygroups><token type="Keyword"/><token type="TextWhitespace"/></bygroups></rule>
+ <rule pattern="([-A-Za-z0-9]+)(\[[^\] \t=]+\])?([ \t]*)(=)([ \t]*)([^\n]*)([ \t\n]*\n)"><bygroups><token type="NameAttribute"/><token type="NameNamespace"/><token type="TextWhitespace"/><token type="Operator"/><token type="TextWhitespace"/><token type="LiteralString"/><token type="TextWhitespace"/></bygroups></rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,52 @@
+<lexer>
+ <config>
+ <name>Diff</name>
+ <alias>diff</alias>
+ <alias>udiff</alias>
+ <filename>*.diff</filename>
+ <filename>*.patch</filename>
+ <mime_type>text/x-diff</mime_type>
+ <mime_type>text/x-patch</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern=" .*\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\d+(,\d+)?(a|c|d)\d+(,\d+)?\n">
+ <token type="GenericSubheading"/>
+ </rule>
+ <rule pattern="---\n">
+ <token type="GenericStrong"/>
+ </rule>
+ <rule pattern="< .*\n">
+ <token type="GenericDeleted"/>
+ </rule>
+ <rule pattern="> .*\n">
+ <token type="GenericInserted"/>
+ </rule>
+ <rule pattern="\+.*\n">
+ <token type="GenericInserted"/>
+ </rule>
+ <rule pattern="-.*\n">
+ <token type="GenericDeleted"/>
+ </rule>
+ <rule pattern="!.*\n">
+ <token type="GenericStrong"/>
+ </rule>
+ <rule pattern="@.*\n">
+ <token type="GenericSubheading"/>
+ </rule>
+ <rule pattern="([Ii]ndex|diff).*\n">
+ <token type="GenericHeading"/>
+ </rule>
+ <rule pattern="=.*\n">
+ <token type="GenericHeading"/>
+ </rule>
+ <rule pattern=".*\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,153 @@
+<lexer>
+ <config>
+ <name>Django/Jinja</name>
+ <alias>django</alias>
+ <alias>jinja</alias>
+ <mime_type>application/x-django-templating</mime_type>
+ <mime_type>application/x-jinja</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="var">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(-?)(\}\})">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="varnames"/>
+ </rule>
+ </state>
+ <state name="block">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(-?)(%\})">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="varnames"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[^{]+">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="\{\{">
+ <token type="CommentPreproc"/>
+ <push state="var"/>
+ </rule>
+ <rule pattern="\{[*#].*?[*#]\}">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(\{%)(-?\s*)(comment)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endcomment)(\s*-?)(%\})">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ <token type="Comment"/>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{%)(-?\s*)(filter)(\s+)([a-zA-Z_]\w*)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ <push state="block"/>
+ </rule>
+ <rule pattern="(\{%)(-?\s*)([a-zA-Z_]\w*)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <push state="block"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Other"/>
+ </rule>
+ </state>
+ <state name="varnames">
+ <rule pattern="(\|)(\s*)([a-zA-Z_]\w*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(is)(\s+)(not)?(\s+)?([a-zA-Z_]\w*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(_|true|false|none|True|False|None)\b">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="(in|as|reversed|recursive|not|and|or|is|if|else|import|with(?:(?:out)?\s*context)?|scoped|ignore\s+missing)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(loop|block|super|forloop)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[a-zA-Z_][\w-]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\.\w+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern=":?"(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=":?'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="([{}()\[\]+\-*/,:~]|[><=]=?)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?">
+ <token type="LiteralNumber"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,44 @@
+<?xml version="1.0"?>
+<lexer>
+ <config>
+ <name>dns</name>
+ <alias>zone</alias>
+ <alias>bind</alias>
+ <filename>*.zone</filename>
+ <mime_type>text/dns</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\b(IN|A|AAAA|AFSDB|APL|CAA|CDNSKEY|CDS|CERT|CNAME|DHCID|DLV|DNAME|DNSKEY|DS|HIP|IPSECKEY|KEY|KX|LOC|MX|NAPTR|NS|NSEC|NSEC3|NSEC3PARAM|PTR|RRSIG|RP|SIG|SOA|SRV|SSHFP|TA|TKEY|TLSA|TSIG|TXT)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern=";.*(\S|$)">
+ <token type="Comment"/>
+ </rule>
@@ -0,0 +1,57 @@
+<lexer>
+ <config>
+ <name>Docker</name>
+ <alias>docker</alias>
+ <alias>dockerfile</alias>
+ <filename>Dockerfile</filename>
+ <filename>Dockerfile.*</filename>
+ <filename>*.Dockerfile</filename>
+ <filename>*.docker</filename>
+ <mime_type>text/x-dockerfile-config</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="#.*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(ONBUILD)((?:\s*\\?\s*))">
+ <bygroups>
+ <token type="Keyword"/>
+ <using lexer="Bash"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(HEALTHCHECK)(((?:\s*\\?\s*)--\w+=\w+(?:\s*\\?\s*))*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <using lexer="Bash"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(VOLUME|ENTRYPOINT|CMD|SHELL)((?:\s*\\?\s*))(\[.*?\])">
+ <bygroups>
+ <token type="Keyword"/>
+ <using lexer="Bash"/>
+ <using lexer="JSON"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(LABEL|ENV|ARG)((?:(?:\s*\\?\s*)\w+=\w+(?:\s*\\?\s*))*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <using lexer="Bash"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((?:FROM|MAINTAINER|EXPOSE|WORKDIR|USER|STOPSIGNAL)|VOLUME)\b(.*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((?:RUN|CMD|ENTRYPOINT|ENV|ARG|LABEL|ADD|COPY))">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(.*\\\n)*.+">
+ <using lexer="Bash"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,168 @@
+<lexer>
+ <config>
+ <name>DTD</name>
+ <alias>dtd</alias>
+ <filename>*.dtd</filename>
+ <mime_type>application/xml-dtd</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="common">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(%|&)[^;]*;">
+ <token type="NameEntity"/>
+ </rule>
+ <rule pattern="<!--">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="[(|)*,?+]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=""[^"]*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\'[^\']*\'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^-]+">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="-->">
+ <token type="Comment"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="-">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="element">
+ <rule>
+ <include state="common"/>
+ </rule>
+ <rule pattern="EMPTY|ANY|#PCDATA">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="[^>\s|()?+*,]+">
+ <token type="NameTag"/>
+ </rule>
+ <rule pattern=">">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="attlist">
+ <rule>
+ <include state="common"/>
+ </rule>
+ <rule pattern="CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="#REQUIRED|#IMPLIED|#FIXED">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="xml:space|xml:lang">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="[^>\s|()?+*,]+">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern=">">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="entity">
+ <rule>
+ <include state="common"/>
+ </rule>
+ <rule pattern="SYSTEM|PUBLIC|NDATA">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="[^>\s|()?+*,]+">
+ <token type="NameEntity"/>
+ </rule>
+ <rule pattern=">">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="notation">
+ <rule>
+ <include state="common"/>
+ </rule>
+ <rule pattern="SYSTEM|PUBLIC">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="[^>\s|()?+*,]+">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern=">">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="common"/>
+ </rule>
+ <rule pattern="(<!ELEMENT)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ </bygroups>
+ <push state="element"/>
+ </rule>
+ <rule pattern="(<!ATTLIST)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ </bygroups>
+ <push state="attlist"/>
+ </rule>
+ <rule pattern="(<!ENTITY)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameEntity"/>
+ </bygroups>
+ <push state="entity"/>
+ </rule>
+ <rule pattern="(<!NOTATION)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ </bygroups>
+ <push state="notation"/>
+ </rule>
+ <rule pattern="(<!\[)([^\[\s]+)(\s*)(\[)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="NameEntity"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<!DOCTYPE)(\s+)([^>\s]+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="PUBLIC|SYSTEM">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="[\[\]>]">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,176 @@
+<lexer>
+ <config>
+ <name>Dylan</name>
+ <alias>dylan</alias>
+ <filename>*.dylan</filename>
+ <filename>*.dyl</filename>
+ <filename>*.intr</filename>
+ <mime_type>text/x-dylan</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="([a-z0-9-]+:)([ \t]*)(.*(?:\n[ \t].+)*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="TextWhitespace"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <push state="code"/>
+ </rule>
+ </state>
+ <state name="code">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="#b[01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="#o[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="[-+]?(\d*\.\d+([ed][-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[-+]?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="#x[0-9a-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(\?\\?)([\w!&*<>|^$%@+~?/=-]+)(:)(token|name|variable|expression|body|case-body|\*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameVariable"/>
+ <token type="Operator"/>
+ <token type="NameBuiltin"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\?)(:)(token|name|variable|expression|body|case-body|\*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Operator"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\?\\?)([\w!&*<>|^$%@+~?/=-]+)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(=>|::|#\(|#\[|##|\?\?|\?=|\?|[(){}\[\],.;])">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=":=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="#[tf]">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="#"">
+ <token type="LiteralStringSymbol"/>
+ <push state="symbol"/>
+ </rule>
+ <rule pattern="#[a-z0-9-]+">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="#(all-keys|include|key|next|rest)">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="[\w!&*<>|^$%@+~?/=-]+:">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="<[\w!&*<>|^$%@+~?/=-]+>">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="\*[\w!&*<>|^$%@+~?/=-]+\*">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="\$[\w!&*<>|^$%@+~?/=-]+">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="(let|method|function)([ \t]+)([\w!&*<>|^$%@+~?/=-]+)">
+ <bygroups>
+ <token type="NameBuiltin"/>
+ <token type="TextWhitespace"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(error|signal|return|break)">
+ <token type="NameException"/>
+ </rule>
+ <rule pattern="(\\?)([\w!&*<>|^$%@+~?/=-]+)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Name"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="symbol">
+ <rule pattern=""">
+ <token type="LiteralStringSymbol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\\"]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,90 @@
+<lexer>
+ <config>
+ <name>EBNF</name>
+ <alias>ebnf</alias>
+ <filename>*.ebnf</filename>
+ <mime_type>text/x-ebnf</mime_type>
+ </config>
+ <rules>
+ <state name="comment">
+ <rule pattern="[^*)]">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule>
+ <include state="comment_start"/>
+ </rule>
+ <rule pattern="\*\)">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[*)]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="identifier">
+ <rule pattern="([a-zA-Z][\w \-]*)">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comment_start"/>
+ </rule>
+ <rule>
+ <include state="identifier"/>
+ </rule>
+ <rule pattern="=">
+ <token type="Operator"/>
+ <push state="production"/>
+ </rule>
+ </state>
+ <state name="production">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comment_start"/>
+ </rule>
+ <rule>
+ <include state="identifier"/>
+ </rule>
+ <rule pattern=""[^"]*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'[^']*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="(\?[^?]*\?)">
+ <token type="NameEntity"/>
+ </rule>
+ <rule pattern="[\[\]{}(),|]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="-">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="comment_start">
+ <rule pattern="\(\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,744 @@
+<lexer>
+ <config>
+ <name>Elixir</name>
+ <alias>elixir</alias>
+ <alias>ex</alias>
+ <alias>exs</alias>
+ <filename>*.ex</filename>
+ <filename>*.eex</filename>
+ <filename>*.exs</filename>
+ <mime_type>text/x-elixir</mime_type>
+ </config>
+ <rules>
+ <state name="cb-intp">
+ <rule pattern="[^#\}\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\}[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="triquot-end">
+ <rule pattern="[a-zA-Z]+">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="apos-no-intp">
+ <rule pattern="[^'\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="'[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="slas-no-intp">
+ <rule pattern="[^/\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="/[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="pipe-no-intp">
+ <rule pattern="[^\|\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\|[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="apos-intp">
+ <rule pattern="[^#'\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="'[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="cb-no-intp">
+ <rule pattern="[^\}\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\}[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="heredoc_double">
+ <rule pattern="^\s*"""">
+ <token type="LiteralStringHeredoc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="heredoc_interpol"/>
+ </rule>
+ </state>
+ <state name="triapos-end">
+ <rule pattern="[a-zA-Z]+">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="interpol_string">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="triquot-intp">
+ <rule pattern="^\s*"""">
+ <token type="LiteralStringHeredoc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="heredoc_interpol"/>
+ </rule>
+ </state>
+ <state name="interpol">
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpol_string"/>
+ </rule>
+ </state>
+ <state name="pa-no-intp">
+ <rule pattern="[^\)\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\)[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="map_key">
+ <rule>
+ <include state="root"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ <push state="map_val"/>
+ </rule>
+ <rule pattern="=>">
+ <token type="Punctuation"/>
+ <push state="map_val"/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="pa-intp">
+ <rule pattern="[^#\)\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\)[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="tuple">
+ <rule>
+ <include state="root"/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(\?)(\\x\{)([\da-fA-F]+)(\})">
+ <bygroups>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringEscape"/>
+ <token type="LiteralNumberHex"/>
+ <token type="LiteralStringEscape"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\?)(\\x[\da-fA-F]{1,2})">
+ <bygroups>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringEscape"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\?)(\\[abdefnrstv])">
+ <bygroups>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringEscape"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\?\\?.">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern=":::">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="::">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=":(?:\.\.\.|<<>>|%\{\}|%|\{\})">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern=":(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&))">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern=":"">
+ <token type="LiteralStringSymbol"/>
+ <push state="string_double_atom"/>
+ </rule>
+ <rule pattern=":'">
+ <token type="LiteralStringSymbol"/>
+ <push state="string_single_atom"/>
+ </rule>
+ <rule pattern="((?:\.\.\.|<<>>|%\{\}|%|\{\})|(?:(?:\.\.\.|[a-z_]\w*[!?]?)|[A-Z]\w*(?:\.[A-Z]\w*)*|(?:\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>|\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~|\<|\>|\+|\-|\*|\/|\!|\^|\&)))(:)(?=\s|\n)">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(fn|do|end|after|else|rescue|catch)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(not|and|or|when|in)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="(case|cond|for|if|unless|try|receive|raise|quote|unquote|unquote_splicing|throw|super|while)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(def|defp|defmodule|defprotocol|defmacro|defmacrop|defdelegate|defexception|defstruct|defimpl|defcallback)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(import|require|use|alias)\b">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern="(nil|true|false)\b">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="(_|__MODULE__|__DIR__|__ENV__|__CALLER__)\b">
+ <token type="NamePseudo"/>
+ </rule>
+ <rule pattern="@(?:\.\.\.|[a-z_]\w*[!?]?)">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(?:\.\.\.|[a-z_]\w*[!?]?)">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="(%?)([A-Z]\w*(?:\.[A-Z]\w*)*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\<\<\<|\>\>\>|\|\|\||\&\&\&|\^\^\^|\~\~\~|\=\=\=|\!\=\=|\~\>\>|\<\~\>|\|\~\>|\<\|\>">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\=\=|\!\=|\<\=|\>\=|\&\&|\|\||\<\>|\+\+|\-\-|\|\>|\=\~|\-\>|\<\-|\||\.|\=|\~\>|\<\~">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\\\\|\<\<|\>\>|\=\>|\(|\)|\:|\;|\,|\[|\]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="&\d">
+ <token type="NameEntity"/>
+ </rule>
+ <rule pattern="\<|\>|\+|\-|\*|\/|\!|\^|\&">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="0b[01](_?[01])*">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0o[0-7](_?[0-7])*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0x[\da-fA-F](_?[\dA-Fa-f])*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d(_?\d)*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""""\s*">
+ <token type="LiteralStringHeredoc"/>
+ <push state="heredoc_double"/>
+ </rule>
+ <rule pattern="'''\s*$">
+ <token type="LiteralStringHeredoc"/>
+ <push state="heredoc_single"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string_double"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <push state="string_single"/>
+ </rule>
+ <rule>
+ <include state="sigils"/>
+ </rule>
+ <rule pattern="%\{">
+ <token type="Punctuation"/>
+ <push state="map_key"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="tuple"/>
+ </rule>
+ </state>
+ <state name="sigils">
+ <rule pattern="(~[a-z])(""")">
+ <bygroups>
+ <token type="LiteralStringOther"/>
+ <token type="LiteralStringHeredoc"/>
+ </bygroups>
+ <push state="triquot-end" state="triquot-intp"/>
+ </rule>
+ <rule pattern="(~[A-Z])(""")">
+ <bygroups>
+ <token type="LiteralStringOther"/>
+ <token type="LiteralStringHeredoc"/>
+ </bygroups>
+ <push state="triquot-end" state="triquot-no-intp"/>
+ </rule>
+ <rule pattern="(~[a-z])(''')">
+ <bygroups>
+ <token type="LiteralStringOther"/>
+ <token type="LiteralStringHeredoc"/>
+ </bygroups>
+ <push state="triapos-end" state="triapos-intp"/>
+ </rule>
+ <rule pattern="(~[A-Z])(''')">
+ <bygroups>
+ <token type="LiteralStringOther"/>
+ <token type="LiteralStringHeredoc"/>
+ </bygroups>
+ <push state="triapos-end" state="triapos-no-intp"/>
+ </rule>
+ <rule pattern="~[a-z]\{">
+ <token type="LiteralStringOther"/>
+ <push state="cb-intp"/>
+ </rule>
+ <rule pattern="~[A-Z]\{">
+ <token type="LiteralStringOther"/>
+ <push state="cb-no-intp"/>
+ </rule>
+ <rule pattern="~[a-z]\[">
+ <token type="LiteralStringOther"/>
+ <push state="sb-intp"/>
+ </rule>
+ <rule pattern="~[A-Z]\[">
+ <token type="LiteralStringOther"/>
+ <push state="sb-no-intp"/>
+ </rule>
+ <rule pattern="~[a-z]\(">
+ <token type="LiteralStringOther"/>
+ <push state="pa-intp"/>
+ </rule>
+ <rule pattern="~[A-Z]\(">
+ <token type="LiteralStringOther"/>
+ <push state="pa-no-intp"/>
+ </rule>
+ <rule pattern="~[a-z]<">
+ <token type="LiteralStringOther"/>
+ <push state="ab-intp"/>
+ </rule>
+ <rule pattern="~[A-Z]<">
+ <token type="LiteralStringOther"/>
+ <push state="ab-no-intp"/>
+ </rule>
+ <rule pattern="~[a-z]/">
+ <token type="LiteralStringOther"/>
+ <push state="slas-intp"/>
+ </rule>
+ <rule pattern="~[A-Z]/">
+ <token type="LiteralStringOther"/>
+ <push state="slas-no-intp"/>
+ </rule>
+ <rule pattern="~[a-z]\|">
+ <token type="LiteralStringOther"/>
+ <push state="pipe-intp"/>
+ </rule>
+ <rule pattern="~[A-Z]\|">
+ <token type="LiteralStringOther"/>
+ <push state="pipe-no-intp"/>
+ </rule>
+ <rule pattern="~[a-z]"">
+ <token type="LiteralStringOther"/>
+ <push state="quot-intp"/>
+ </rule>
+ <rule pattern="~[A-Z]"">
+ <token type="LiteralStringOther"/>
+ <push state="quot-no-intp"/>
+ </rule>
+ <rule pattern="~[a-z]'">
+ <token type="LiteralStringOther"/>
+ <push state="apos-intp"/>
+ </rule>
+ <rule pattern="~[A-Z]'">
+ <token type="LiteralStringOther"/>
+ <push state="apos-no-intp"/>
+ </rule>
+ </state>
+ <state name="triapos-intp">
+ <rule pattern="^\s*'''">
+ <token type="LiteralStringHeredoc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="heredoc_interpol"/>
+ </rule>
+ </state>
+ <state name="string_single_atom">
+ <rule pattern="[^#'\\]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="(')">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="quot-intp">
+ <rule pattern="[^#"\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern=""[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="sb-no-intp">
+ <rule pattern="[^\]\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\][a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="slas-intp">
+ <rule pattern="[^#/\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="/[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="sb-intp">
+ <rule pattern="[^#\]\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\][a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="heredoc_no_interpol">
+ <rule pattern="[^\\\n]+">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule pattern="\n+">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ </state>
+ <state name="pipe-intp">
+ <rule pattern="[^#\|\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\|[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="map_val">
+ <rule>
+ <include state="root"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=\})">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="heredoc_single">
+ <rule pattern="^\s*'''">
+ <token type="LiteralStringHeredoc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="heredoc_interpol"/>
+ </rule>
+ </state>
+ <state name="heredoc_interpol">
+ <rule pattern="[^#\\\n]+">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule pattern="\n+">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="string_single">
+ <rule pattern="[^#'\\]+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="(')">
+ <bygroups>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="string_double_atom">
+ <rule pattern="[^#"\\]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="(")">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="ab-no-intp">
+ <rule pattern="[^>\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern=">[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="ab-intp">
+ <rule pattern="[^#>\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern=">[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="quot-no-intp">
+ <rule pattern="[^"\\]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern=""[a-zA-Z]*">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="triapos-no-intp">
+ <rule pattern="^\s*'''">
+ <token type="LiteralStringHeredoc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="heredoc_no_interpol"/>
+ </rule>
+ </state>
+ <state name="string_double">
+ <rule pattern="[^#"\\]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="(")">
+ <bygroups>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="interpol"/>
+ </rule>
+ </state>
+ <state name="escapes">
+ <rule pattern="(\\x\{)([\da-fA-F]+)(\})">
+ <bygroups>
+ <token type="LiteralStringEscape"/>
+ <token type="LiteralNumberHex"/>
+ <token type="LiteralStringEscape"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\\x[\da-fA-F]{1,2})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="(\\[abdefnrstv])">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ <state name="triquot-no-intp">
+ <rule pattern="^\s*"""">
+ <token type="LiteralStringHeredoc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="heredoc_no_interpol"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,119 @@
+<lexer>
+ <config>
+ <name>Elm</name>
+ <alias>elm</alias>
+ <filename>*.elm</filename>
+ <mime_type>text/x-elm</mime_type>
+ </config>
+ <rules>
+ <state name="shader">
+ <rule pattern="\|(?!\])">
+ <token type="NameEntity"/>
+ </rule>
+ <rule pattern="\|\]">
+ <token type="NameEntity"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".*\n">
+ <token type="NameEntity"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\{-">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="--.*">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="doublequote"/>
+ </rule>
+ <rule pattern="^\s*module\s*">
+ <token type="KeywordNamespace"/>
+ <push state="imports"/>
+ </rule>
+ <rule pattern="^\s*import\s*">
+ <token type="KeywordNamespace"/>
+ <push state="imports"/>
+ </rule>
+ <rule pattern="\[glsl\|.*">
+ <token type="NameEntity"/>
+ <push state="shader"/>
+ </rule>
+ <rule pattern="(import|module|alias|where|port|else|type|case|then|let|as|of|if|in)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="[A-Z]\w*">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="^main ">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="\((<-|\|\||\|>|&&|\+\+|->|\.\.|//|>>|>=|/=|==|::|<~|<\||<=|<<|~|<|=|:|>|'|/|\\|\.|\^|-|`|\+|\*|\||%)\)">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(<-|\|\||\|>|&&|\+\+|->|\.\.|//|>>|>=|/=|==|::|<~|<\||<=|<<|~|<|=|:|>|'|/|\\|\.|\^|-|`|\+|\*|\||%)">
+ <token type="NameFunction"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ <rule pattern="[a-z_][a-zA-Z_\']*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[,()\[\]{}]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="-(?!\})">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\{-">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="[^-}]">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="-\}">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="doublequote">
+ <rule pattern="\\u[0-9a-fA-F]{4}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\[nrfvb\\"]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^"]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="imports">
+ <rule pattern="\w+(\.\w+)*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="_?\d+\.(?=\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="_?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,132 @@
+<lexer>
+ <config>
+ <name>EmacsLisp</name>
+ <alias>emacs</alias>
+ <alias>elisp</alias>
+ <alias>emacs-lisp</alias>
+ <filename>*.el</filename>
+ <mime_type>text/x-elisp</mime_type>
+ <mime_type>application/x-elisp</mime_type>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern="[^"\\`]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="`((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)\'">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <push state="body"/>
+ </rule>
+ </state>
+ <state name="body">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="\?([^\\]|\\.)">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern=":((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="::((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="'((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="'">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="`">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[-+]?\d+\.?(?=[ "()\]\'\n,;`])">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[-+]?\d+/\d+(?=[ "()\]\'\n,;`])">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)(?=[ "()\]\'\n,;`])">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\[|\]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="#:((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="#\^\^?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="#\'">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="#[bB][+-]?[01]+(/[01]+)?">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="#[oO][+-]?[0-7]+(/[0-7]+)?">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="#[xX][+-]?[0-9a-fA-F]+(/[0-9a-fA-F]+)?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="#\d+r[+-]?[0-9a-zA-Z]+(/[0-9a-zA-Z]+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="#\d+=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="#\d+#">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(,@|,|\.|:)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(t|nil)(?=[ "()\]\'\n,;`])">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="\*((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)\*">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="((?:\\.|[\w!$%&*+-/<=>?@^{}~|])(?:\\.|[\w!$%&*+-/<=>?@^{}~|]|[#.:])*)">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="#\(">
+ <token type="Operator"/>
+ <push state="body"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="body"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,166 @@
+<lexer>
+ <config>
+ <name>Erlang</name>
+ <alias>erlang</alias>
+ <filename>*.erl</filename>
+ <filename>*.hrl</filename>
+ <filename>*.es</filename>
+ <filename>*.escript</filename>
+ <mime_type>text/x-erlang</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="%.*\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(receive|after|begin|catch|query|case|cond|when|let|fun|end|try|of|if)\b">
+ <token type="Keyword"/>
+ </rule>
@@ -0,0 +1,412 @@
+<lexer>
+ <config>
+ <name>Factor</name>
+ <alias>factor</alias>
+ <filename>*.factor</filename>
+ <mime_type>text/x-factor</mime_type>
+ </config>
+ <rules>
+ <state name="base">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(M:[:]?)(\s+)(\S+)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(C:)(\s+)(\S+)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(GENERIC:)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\(\s">
+ <token type="NameFunction"/>
+ <push state="stackeffect"/>
+ </rule>
+ <rule pattern=";\s">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(USING:)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="vocabs"/>
+ </rule>
+ <rule pattern="(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="words"/>
+ </rule>
+ <rule pattern="(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ <push state="slots"/>
+ </rule>
+ <rule pattern="(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ <push state="slots"/>
+ </rule>
+ <rule pattern="(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(C:)(\s+)(\S+)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(INSTANCE:)(\s+)(\S+)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(SLOT:)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(SINGLETON:)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="SINGLETONS:">
+ <token type="Keyword"/>
+ <push state="classes"/>
+ </rule>
+ <rule pattern="(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="SYMBOLS:\s">
+ <token type="Keyword"/>
+ <push state="words"/>
+ </rule>
+ <rule pattern="SYNTAX:\s">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="ALIEN:\s">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(STRUCT:)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^)]+\)\s)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?:<PRIVATE|PRIVATE>)\s">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern=""""\s+(?:.|\n)*?\s+"""">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""(?:\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\S+"\s+(?:\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="!\s+.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="#!\s+.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="/\*\s+(?:.|\n)*?\s\*/\s">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="[tf]\s">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="[\\$]\s+\S+">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="M\\\s+\S+\s+\S+">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="0b[01]+\s">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0o[0-7]+\s">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="(?:deprecated|final|foldable|flushable|inline|recursive)\s">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(identity-hashcode|callstack>array|identity-tuple\?|identity-tuple|retainstack|callstack\?|tri-curry\*|tri-curry@|tri-curry|<wrapper>|datastack|bi-curry@|bi-curry\*|hashcode\*|callstack|\?execute|hashcode|boolean\?|compose\?|>boolean|wrapper\?|bi-curry|unless\*|boolean|assert\?|\(clone\)|either\?|prepose|assert=|execute|wrapper|compose|3curry|assert|2curry|curry\?|object|equal\?|tuple\?|unless|build|3drop|same\?|2tri\*|2tri@|both\?|3keep|4drop|throw|2over|swapd|clear|2keep|2drop|until|curry|4keep|clone|while|tuple|when\*|-rot|tri@|dupd|drop|tri\*|call|when|with|4dup|4dip|3tri|3dup|3dip|2tri|keep|loop|most|2nip|swap|2dup|null|2dip|2bi\*|2bi@|pick|over|and|rot|not|nip|new|if\*|tri|2bi|boa|eq\?|dup|3bi|dip|die|bi\*|bi@|\?if|xor|bi|do|if|or|\?|=)\s">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(assoc-clone-like|assoc-filter-as|assoc-partition|assoc-intersect|assoc-hashcode|assoc-combine|assoc-filter!|assoc-subset\?|assoc-union!|maybe-set-at|extract-keys|assoc-map-as|assoc-differ|assoc-refine|assoc-empty\?|assoc-filter|assoc-diff!|sift-values|assoc-union|assoc-stack|clear-assoc|assoc-all\?|delete-at\*|assoc-find|substitute|assoc-each|assoc-size|assoc-diff|assoc-any\?|assoc-like|rename-at|sift-keys|new-assoc|map>assoc|value-at\*|assoc-map|delete-at|change-at|assoc>map|value-at|push-at|assoc=|values|set-at|<enum>|inc-at|2cache|value\?|assoc\?|>alist|cache|enum\?|assoc|unzip|key\?|enum|keys|\?at|\?of|zip|at\+|at\*|at|of)\s">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(shallow-spread>quot|recursive-hashcode|linear-case-quot|deep-spread>quot|to-fixed-point|execute-effect|wrong-values\?|4cleave>quot|2cleave>quot|wrong-values|3cleave>quot|cleave>quot|call-effect|alist>quot|case>quot|case-find|cond>quot|no-case\?|no-cond\?|no-case|no-cond|4cleave|3cleave|2cleave|cleave|spread|cond|case)\s">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(log2-expects-positive\?|integer>fixnum-strict|log2-expects-positive|out-of-fixnum-range\?|out-of-fixnum-range|find-last-integer|next-power-of-2|\(all-integers\?\)|integer>fixnum|\(find-integer\)|\(each-integer\)|imaginary-part|fp-nan-payload|all-integers\?|find-integer|each-integer|fp-infinity\?|fp-special\?|fp-bitwise=|bits>double|double>bits|power-of-2\?|unless-zero|denominator|next-float|bits>float|float>bits|prev-float|unordered\?|real-part|when-zero|numerator|rational\?|>integer|rational|complex\?|<fp-nan>|fp-qnan\?|fp-snan\?|integer\?|number=|bignum\?|integer|>fixnum|fp-sign|fp-nan\?|fixnum\?|number\?|complex|if-zero|>bignum|bignum|number|fixnum|float\?|bitxor|ratio\?|bitnot|bitand|>float|real\?|bitor|zero\?|even\?|times|shift|float|recip|align|ratio|neg\?|real|log2|bit\?|odd\?|/mod|\?1\+|mod|rem|neg|sgn|u<=|u>=|abs|u>|2/|2\^|/i|/f|sq|<=|u<|>=|-|\+|<|\*|/|>)\s">
+ <token type="NameBuiltin"/>
+ </rule>
@@ -0,0 +1,68 @@
+<lexer>
+ <config>
+ <name>Fennel</name>
+ <alias>fennel</alias>
+ <alias>fnl</alias>
+ <filename>*.fennel</filename>
+ <mime_type>text/x-fennel</mime_type>
+ <mime_type>application/x-fennel</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern=";.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="-?\d+\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="0x-?[abcdef\d]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'(?!#)[\w!$%*+<=>?/.#-]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="\\(.|[a-z]+)">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="::?#?(?!#)[\w!$%*+<=>?/.#-]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="~@|[`\'#^~&@]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(require-macros|set-forcibly!|import-macros|eval-compiler|pick-values|accumulate|macrodebug|pick-args|with-open|icollect|partial|comment|include|collect|hashfn|rshift|values|length|lshift|quote|match|while|doto|band|when|bnot|bxor|not=|tset|-\?>>|each|->>|let|doc|for|and|set|not|-\?>|bor|lua|\?\.|do|>=|<=|//|\.\.|->|or|if|~=|\^|>|=|<|:|/|\.|-|\+|\*|%|#) ">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(global|lambda|macros|local|macro|var|fn|λ) ">
+ <token type="KeywordDeclaration"/>
+ </rule>
@@ -0,0 +1,159 @@
+<lexer> <!-- Fish shell (fish/fishshell) syntax definition for the Chroma highlighter -->
+  <config>
+    <name>Fish</name>
+    <alias>fish</alias>
+    <alias>fishshell</alias>
+    <filename>*.fish</filename>
+    <filename>*.load</filename>
+    <mime_type>application/x-fish</mime_type>
+  </config>
+  <rules>
+    <state name="paren"> <!-- inside a (...) command substitution; delegates to root until ")" -->
+      <rule pattern="\)">
+        <token type="Keyword"/>
+        <pop depth="1"/>
+      </rule>
+      <rule>
+        <include state="root"/>
+      </rule>
+    </state>
+    <state name="math"> <!-- inside $((...)) arithmetic; "))" closes and pops back -->
+      <rule pattern="\)\)">
+        <token type="Keyword"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern="[-+*/%^|&]|\*\*|\|\|">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="\d+#\d+"> <!-- base#number literal -->
+        <token type="LiteralNumber"/>
+      </rule>
+      <rule pattern="\d+#(?! )">
+        <token type="LiteralNumber"/>
+      </rule>
+      <rule pattern="\d+">
+        <token type="LiteralNumber"/>
+      </rule>
+      <rule>
+        <include state="root"/>
+      </rule>
+    </state>
+    <state name="root"> <!-- entry state: keywords/builtins first, then interpolation, then raw data -->
+      <rule>
+        <include state="basic"/>
+      </rule>
+      <rule>
+        <include state="interp"/>
+      </rule>
+      <rule>
+        <include state="data"/>
+      </rule>
+    </state>
+    <state name="interp"> <!-- $((...)) arithmetic, (...) substitution, and $variable expansion -->
+      <rule pattern="\$\(\(">
+        <token type="Keyword"/>
+        <push state="math"/>
+      </rule>
+      <rule pattern="\(">
+        <token type="Keyword"/>
+        <push state="paren"/>
+      </rule>
+      <rule pattern="\$#?(\w+|.)">
+        <token type="NameVariable"/>
+      </rule>
+    </state>
+    <state name="basic"> <!-- keywords, builtins, comments, assignments, heredocs, command names -->
+      <!-- keyword only at command position (after start, ;, &&, ||, | or another keyword) -->
+      <rule pattern="(?<=(?:^|\A|;|&&|\|\||\||\b(continue|function|return|switch|begin|while|break|count|false|block|echo|case|true|else|exit|test|set|cdh|and|pwd|for|end|not|if|cd|or)\b)\s*)(continue|function|return|switch|begin|while|break|count|false|block|test|case|true|echo|exit|else|set|cdh|and|pwd|for|end|not|if|cd|or)(?=;?\b)">
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="(?<=for\s+\S+\s+)in\b"> <!-- "in" only inside a for-loop header -->
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="\b(fish_update_completions|fish_command_not_found|fish_breakpoint_prompt|fish_status_to_signal|fish_right_prompt|fish_is_root_user|fish_mode_prompt|fish_vcs_prompt|fish_key_reader|fish_svn_prompt|fish_git_prompt|fish_hg_prompt|fish_greeting|fish_add_path|commandline|fish_prompt|fish_indent|fish_config|fish_pager|breakpoint|fish_title|prompt_pwd|functions|set_color|realpath|funcsave|contains|complete|argparse|fish_opt|history|builtin|getopts|suspend|command|mimedb|printf|ulimit|disown|string|source|funced|status|random|isatty|fishd|prevd|vared|umask|nextd|alias|pushd|emit|jobs|popd|help|psub|wait|fish|read|time|exec|eval|math|trap|type|dirs|dirh|abbr|kill|bind|hash|open|fc|bg|fg)\s*\b(?!\.)">
+        <token type="NameBuiltin"/>
+      </rule>
+      <rule pattern="#!.*\n">
+        <token type="CommentHashbang"/>
+      </rule>
+      <rule pattern="#.*\n">
+        <token type="Comment"/>
+      </rule>
+      <rule pattern="\\[\w\W]"> <!-- backslash escape of any single character -->
+        <token type="LiteralStringEscape"/>
+      </rule>
+      <rule pattern="(\b\w+)(\s*)(=)"> <!-- name = value assignment -->
+        <bygroups>
+          <token type="NameVariable"/>
+          <token type="Text"/>
+          <token type="Operator"/>
+        </bygroups>
+      </rule>
+      <rule pattern="[\[\]()={}]">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="(?<=\[[^\]]+)\.\.|-(?=[^\[]+\])"> <!-- range operators inside [index] -->
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2"> <!-- heredoc up to matching delimiter -->
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern="(?<=set\s+(?:--?[^\d\W][\w-]*\s+)?)\w+"> <!-- variable name after "set" -->
+        <token type="NameVariable"/>
+      </rule>
+      <rule pattern="(?<=for\s+)\w[\w-]*(?=\s+in)">
+        <token type="NameVariable"/>
+      </rule>
+      <rule pattern="(?<=function\s+)\w(?:[^\n])*?(?= *[-\n])"> <!-- function name after "function" -->
+        <token type="NameFunction"/>
+      </rule>
+      <rule pattern="(?<=(?:^|\b(?:and|or|sudo)\b|;|\|\||&&|\||\(|(?:\b\w+\s*=\S+\s)) *)\w[\w-]*"> <!-- word at command position -->
+        <token type="NameFunction"/>
+      </rule>
+    </state>
+    <state name="data"> <!-- strings, numbers, option flags, and everything else -->
+      <rule pattern="(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*""> <!-- double-quoted string with no interpolation inside -->
+        <token type="LiteralStringDouble"/>
+      </rule>
+      <rule pattern=""">
+        <token type="LiteralStringDouble"/>
+        <push state="string"/>
+      </rule>
+      <rule pattern="(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'">
+        <token type="LiteralStringSingle"/>
+      </rule>
+      <rule pattern="(?s)'.*?'">
+        <token type="LiteralStringSingle"/>
+      </rule>
+      <rule pattern=";">
+        <token type="Punctuation"/>
+      </rule>
+      <rule pattern="&&|\|\||&|\||\^|<|>">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="\s+">
+        <token type="Text"/>
+      </rule>
+      <rule pattern="\b\d+\b">
+        <token type="LiteralNumber"/>
+      </rule>
+      <rule pattern="(?<=\s+)--?[^\d][\w-]*"> <!-- -x / --long option flags -->
+        <token type="NameAttribute"/>
+      </rule>
+      <rule pattern=".+?"> <!-- fallback: consume anything else as plain text -->
+        <token type="Text"/>
+      </rule>
+    </state>
+    <state name="string"> <!-- inside an interpolating double-quoted string -->
+      <rule pattern=""">
+        <token type="LiteralStringDouble"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern="(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+">
+        <token type="LiteralStringDouble"/>
+      </rule>
+      <rule>
+        <include state="interp"/>
+      </rule>
+    </state>
+  </rules>
+</lexer>
@@ -0,0 +1,78 @@
+<lexer>
+ <config>
+ <name>Forth</name>
+ <alias>forth</alias>
+ <filename>*.frt</filename>
+ <filename>*.fth</filename>
+ <filename>*.fs</filename>
+ <mime_type>application/x-forth</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\([\s].*?\)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(:|variable|constant|value|buffer:)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="worddef"/>
+ </rule>
+ <rule pattern="([.sc]")(\s+?)">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="stringdef"/>
+ </rule>
@@ -0,0 +1,102 @@
+<lexer>
+ <config>
+ <name>Fortran</name>
+ <alias>fortran</alias>
+ <alias>f90</alias>
+ <filename>*.f03</filename>
+ <filename>*.f90</filename>
+ <filename>*.f95</filename>
+ <filename>*.F03</filename>
+ <filename>*.F90</filename>
+ <filename>*.F95</filename>
+ <mime_type>text/x-fortran</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="core">
+ <rule pattern="\b(DO)(\s+)(CONCURRENT)\b">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(GO)(\s*)(TO)\b">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
@@ -0,0 +1,71 @@
+<lexer> <!-- Fixed-form Fortran (column-oriented *.f/*.F) definition for Chroma -->
+  <config>
+    <name>FortranFixed</name>
+    <alias>fortranfixed</alias>
+    <filename>*.f</filename>
+    <filename>*.F</filename>
+    <mime_type>text/x-fortran</mime_type>
+    <case_insensitive>true</case_insensitive>
+    <not_multiline>true</not_multiline>
+  </config>
+  <rules>
+    <state name="cont-char"> <!-- column 6: blank = normal line, anything else = continuation marker -->
+      <rule pattern=" ">
+        <token type="TextWhitespace"/>
+        <push state="code"/>
+      </rule>
+      <rule pattern=".">
+        <token type="GenericStrong"/>
+        <push state="code"/>
+      </rule>
+    </state>
+    <state name="code"> <!-- statement field; delegates to the free-form Fortran lexer -->
+      <rule pattern="(.{66})(.*)(\n)"> <!-- columns 7-72 are code, 73+ is the card-comment field -->
+        <bygroups>
+          <using lexer="Fortran"/>
+          <token type="Comment"/>
+          <token type="TextWhitespace"/>
+        </bygroups>
+        <push state="root"/>
+      </rule>
+      <rule pattern="(.*)(!.*)(\n)"> <!-- inline "!" comment -->
+        <bygroups>
+          <using lexer="Fortran"/>
+          <token type="Comment"/>
+          <token type="TextWhitespace"/>
+        </bygroups>
+        <push state="root"/>
+      </rule>
+      <rule pattern="(.*)(\n)">
+        <bygroups>
+          <using lexer="Fortran"/>
+          <token type="TextWhitespace"/>
+        </bygroups>
+        <push state="root"/>
+      </rule>
+      <rule> <!-- fallback: return to root without consuming input -->
+        <mutators>
+          <push state="root"/>
+        </mutators>
+      </rule>
+    </state>
+    <state name="root"> <!-- start of each line: comment cards, preprocessor, then label field -->
+      <rule pattern="[C*].*\n"> <!-- "C" or "*" in column 1 marks a comment card -->
+        <token type="Comment"/>
+      </rule>
+      <rule pattern="#.*\n">
+        <token type="CommentPreproc"/>
+      </rule>
+      <rule pattern=" {0,4}!.*\n">
+        <token type="Comment"/>
+      </rule>
+      <rule pattern="(.{5})"> <!-- columns 1-5: statement label field -->
+        <token type="NameLabel"/>
+        <push state="cont-char"/>
+      </rule>
+      <rule pattern=".*\n">
+        <using lexer="Fortran"/>
+      </rule>
+    </state>
+  </rules>
+</lexer>
@@ -0,0 +1,245 @@
+<lexer> <!-- F# (*.fs/*.fsi) syntax definition for Chroma -->
+  <config>
+    <name>FSharp</name>
+    <alias>fsharp</alias>
+    <filename>*.fs</filename>
+    <filename>*.fsi</filename>
+    <mime_type>text/x-fsharp</mime_type>
+  </config>
+  <rules>
+    <state name="comment"> <!-- (* ... *) block comment; nests via push/pop, skips strings inside -->
+      <rule pattern="[^(*)@"]+">
+        <token type="Comment"/>
+      </rule>
+      <rule pattern="\(\*"> <!-- nested open: push another comment level -->
+        <token type="Comment"/>
+        <push/>
+      </rule>
+      <rule pattern="\*\)">
+        <token type="Comment"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern="@""> <!-- verbatim string inside a comment must be consumed as a unit -->
+        <token type="LiteralString"/>
+        <push state="lstring"/>
+      </rule>
+      <rule pattern=""""">
+        <token type="LiteralString"/>
+        <push state="tqs"/>
+      </rule>
+      <rule pattern=""">
+        <token type="LiteralString"/>
+        <push state="string"/>
+      </rule>
+      <rule pattern="[(*)@]">
+        <token type="Comment"/>
+      </rule>
+    </state>
+    <state name="string"> <!-- regular "..." string with escapes -->
+      <rule pattern="[^\\"]+">
+        <token type="LiteralString"/>
+      </rule>
+      <rule>
+        <include state="escape-sequence"/>
+      </rule>
+      <rule pattern="\\\n">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern="\n">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern=""B?"> <!-- optional B suffix marks a byte string -->
+        <token type="LiteralString"/>
+        <pop depth="1"/>
+      </rule>
+    </state>
+    <state name="lstring"> <!-- verbatim @"..." string; "" is an escaped quote -->
+      <rule pattern="[^"]+">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern="\n">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern="""">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern=""B?">
+        <token type="LiteralString"/>
+        <pop depth="1"/>
+      </rule>
+    </state>
+    <state name="tqs"> <!-- triple-quoted """...""" string -->
+      <rule pattern="[^"]+">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern="\n">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern=""""B?">
+        <token type="LiteralString"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern=""">
+        <token type="LiteralString"/>
+      </rule>
+    </state>
+    <state name="escape-sequence"> <!-- shared escape forms: \n, \065, \u0041, \U00000041 -->
+      <rule pattern="\\[\\"\'ntbrafv]">
+        <token type="LiteralStringEscape"/>
+      </rule>
+      <rule pattern="\\[0-9]{3}">
+        <token type="LiteralStringEscape"/>
+      </rule>
+      <rule pattern="\\u[0-9a-fA-F]{4}">
+        <token type="LiteralStringEscape"/>
+      </rule>
+      <rule pattern="\\U[0-9a-fA-F]{8}">
+        <token type="LiteralStringEscape"/>
+      </rule>
+    </state>
+    <state name="root"> <!-- entry state -->
+      <rule pattern="\s+">
+        <token type="Text"/>
+      </rule>
+      <rule pattern="\(\)|\[\]"> <!-- unit and empty-list literals -->
+        <token type="NameBuiltinPseudo"/>
+      </rule>
+      <rule pattern="\b(?<!\.)([A-Z][\w\']*)(?=\s*\.)"> <!-- start of a dotted module path -->
+        <token type="NameNamespace"/>
+        <push state="dotted"/>
+      </rule>
+      <rule pattern="\b([A-Z][\w\']*)">
+        <token type="Name"/>
+      </rule>
+      <rule pattern="///.*?\n">
+        <token type="LiteralStringDoc"/>
+      </rule>
+      <rule pattern="//.*?\n">
+        <token type="CommentSingle"/>
+      </rule>
+      <rule pattern="\(\*(?!\))"> <!-- (* opens a comment unless it is the (*) operator -->
+        <token type="Comment"/>
+        <push state="comment"/>
+      </rule>
+      <rule pattern="@"">
+        <token type="LiteralString"/>
+        <push state="lstring"/>
+      </rule>
+      <rule pattern=""""">
+        <token type="LiteralString"/>
+        <push state="tqs"/>
+      </rule>
+      <rule pattern=""">
+        <token type="LiteralString"/>
+        <push state="string"/>
+      </rule>
+      <rule pattern="\b(open type|open|module)(\s+)([\w.]+)">
+        <bygroups>
+          <token type="Keyword"/>
+          <token type="Text"/>
+          <token type="NameNamespace"/>
+        </bygroups>
+      </rule>
+      <rule pattern="\b(let!?)(\s+)(\w+)"> <!-- let / let! binding -->
+        <bygroups>
+          <token type="Keyword"/>
+          <token type="Text"/>
+          <token type="NameVariable"/>
+        </bygroups>
+      </rule>
+      <rule pattern="\b(type)(\s+)(\w+)">
+        <bygroups>
+          <token type="Keyword"/>
+          <token type="Text"/>
+          <token type="NameClass"/>
+        </bygroups>
+      </rule>
+      <rule pattern="\b(member|override)(\s+)(\w+)(\.)(\w+)">
+        <bygroups>
+          <token type="Keyword"/>
+          <token type="Text"/>
+          <token type="Name"/>
+          <token type="Punctuation"/>
+          <token type="NameFunction"/>
+        </bygroups>
+      </rule>
+      <rule pattern="\b(abstract|as|assert|base|begin|class|default|delegate|do!|do|done|downcast|downto|elif|else|end|exception|extern|false|finally|for|function|fun|global|if|inherit|inline|interface|internal|in|lazy|let!|let|match|member|module|mutable|namespace|new|null|of|open|override|private|public|rec|return!|return|select|static|struct|then|to|true|try|type|upcast|use!|use|val|void|when|while|with|yield!|yield|atomic|break|checked|component|const|constraint|constructor|continue|eager|event|external|fixed|functor|include|method|mixin|object|parallel|process|protected|pure|sealed|tailcall|trait|virtual|volatile)\b">
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="``([^`\n\r\t]|`[^`\n\r\t])+``"> <!-- ``double-backtick`` identifier -->
+        <token type="Name"/>
+      </rule>
+      <rule pattern="#[ \t]*(if|endif|else|line|nowarn|light|r|\d+)\b">
+        <token type="CommentPreproc"/>
+      </rule>
+      <rule pattern="(!=|#|&&|&|\(|\)|\*|\+|,|-\.|->|-|\.\.|\.|::|:=|:>|:|;;|;|<-|<\]|<|>\]|>|\?\?|\?|\[<|\[\||\[|\]|_|`|\{|\|\]|\||\}|~|<@@|<@|=|@>|@@>)">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]"> <!-- user-defined operator symbols -->
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="\b(and|or|not)\b">
+        <token type="OperatorWord"/>
+      </rule>
+      <rule pattern="\b(sbyte|byte|char|nativeint|unativeint|float32|single|float|double|int8|uint8|int16|uint16|int32|uint32|int64|uint64|decimal|unit|bool|string|list|exn|obj|enum)\b">
+        <token type="KeywordType"/>
+      </rule>
+      <rule pattern="[^\W\d][\w']*">
+        <token type="Name"/>
+      </rule>
+      <!-- NOTE(review): the integer rule precedes the float rule, so "3.14" lexes as
+           Integer(3) + remainder; this mirrors the upstream definition — confirm before reordering. -->
+      <rule pattern="\d[\d_]*[uU]?[yslLnQRZINGmM]?">
+        <token type="LiteralNumberInteger"/>
+      </rule>
+      <rule pattern="0[xX][\da-fA-F][\da-fA-F_]*[uU]?[yslLn]?[fF]?">
+        <token type="LiteralNumberHex"/>
+      </rule>
+      <rule pattern="0[oO][0-7][0-7_]*[uU]?[yslLn]?">
+        <token type="LiteralNumberOct"/>
+      </rule>
+      <rule pattern="0[bB][01][01_]*[uU]?[yslLn]?">
+        <token type="LiteralNumberBin"/>
+      </rule>
+      <rule pattern="-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)[fFmM]?">
+        <token type="LiteralNumberFloat"/>
+      </rule>
+      <rule pattern="'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?"> <!-- char literal with escape -->
+        <token type="LiteralStringChar"/>
+      </rule>
+      <rule pattern="'.'">
+        <token type="LiteralStringChar"/>
+      </rule>
+      <rule pattern="'"> <!-- lone quote: generic type parameter marker 'a -->
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="@?"">
+        <token type="LiteralStringDouble"/>
+        <push state="string"/>
+      </rule>
+      <rule pattern="[~?][a-z][\w\']*:"> <!-- optional/named argument label -->
+        <token type="NameVariable"/>
+      </rule>
+    </state>
+    <state name="dotted"> <!-- remainder of a Module.Sub.name path -->
+      <rule pattern="\s+">
+        <token type="Text"/>
+      </rule>
+      <rule pattern="\.">
+        <token type="Punctuation"/>
+      </rule>
+      <rule pattern="[A-Z][\w\']*(?=\s*\.)">
+        <token type="NameNamespace"/>
+      </rule>
+      <rule pattern="[A-Z][\w\']*">
+        <token type="Name"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern="[a-z_][\w\']*">
+        <token type="Name"/>
+        <pop depth="1"/>
+      </rule>
+      <rule>
+        <pop depth="1"/>
+      </rule>
+    </state>
+  </rules>
+</lexer>
@@ -0,0 +1,150 @@
+<lexer> <!-- GNU assembler (AT&T syntax) definition for Chroma -->
+  <config>
+    <name>GAS</name>
+    <alias>gas</alias>
+    <alias>asm</alias>
+    <filename>*.s</filename>
+    <filename>*.S</filename>
+    <mime_type>text/x-gas</mime_type>
+    <priority>0.1</priority>
+  </config>
+  <rules>
+    <state name="punctuation">
+      <rule pattern="[-*,.()\[\]!:]+">
+        <token type="Punctuation"/>
+      </rule>
+    </state>
+    <state name="root"> <!-- line start: labels, directives, prefixes, then mnemonics -->
+      <rule>
+        <include state="whitespace"/>
+      </rule>
+      <rule pattern="(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+):"> <!-- symbol: label definition -->
+        <token type="NameLabel"/>
+      </rule>
+      <rule pattern="\.(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)"> <!-- .directive -->
+        <token type="NameAttribute"/>
+        <push state="directive-args"/>
+      </rule>
+      <rule pattern="lock|rep(n?z)?|data\d+"> <!-- instruction prefixes -->
+        <token type="NameAttribute"/>
+      </rule>
+      <rule pattern="(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)"> <!-- instruction mnemonic -->
+        <token type="NameFunction"/>
+        <push state="instruction-args"/>
+      </rule>
+      <rule pattern="[\r\n]+">
+        <token type="Text"/>
+      </rule>
+    </state>
+    <state name="directive-args">
+      <rule pattern="(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)">
+        <token type="NameConstant"/>
+      </rule>
+      <rule pattern=""(\\"|[^"])*"">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern="@(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)"> <!-- @type annotations, e.g. @function -->
+        <token type="NameAttribute"/>
+      </rule>
+      <rule pattern="(?:0[xX][a-zA-Z0-9]+|\d+)">
+        <token type="LiteralNumberInteger"/>
+      </rule>
+      <rule pattern="%(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)"> <!-- %register -->
+        <token type="NameVariable"/>
+      </rule>
+      <rule pattern="[\r\n]+">
+        <token type="Text"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern="([;#]|//).*?\n">
+        <token type="CommentSingle"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern="/[*].*?[*]/">
+        <token type="CommentMultiline"/>
+      </rule>
+      <rule pattern="/[*].*?\n[\w\W]*?[*]/"> <!-- multiline comment crosses the line: leave arg state -->
+        <token type="CommentMultiline"/>
+        <pop depth="1"/>
+      </rule>
+      <rule>
+        <include state="punctuation"/>
+      </rule>
+      <rule>
+        <include state="whitespace"/>
+      </rule>
+    </state>
+    <state name="instruction-args">
+      <rule pattern="([a-z0-9]+)( )(<)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))(>)"> <!-- objdump-style "addr <symbol>" -->
+        <bygroups>
+          <token type="LiteralNumberHex"/>
+          <token type="Text"/>
+          <token type="Punctuation"/>
+          <token type="NameConstant"/>
+          <token type="Punctuation"/>
+        </bygroups>
+      </rule>
+      <rule pattern="([a-z0-9]+)( )(<)((?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+))([-+])((?:0[xX][a-zA-Z0-9]+|\d+))(>)"> <!-- "addr <symbol+offset>" -->
+        <bygroups>
+          <token type="LiteralNumberHex"/>
+          <token type="Text"/>
+          <token type="Punctuation"/>
+          <token type="NameConstant"/>
+          <token type="Punctuation"/>
+          <token type="LiteralNumberInteger"/>
+          <token type="Punctuation"/>
+        </bygroups>
+      </rule>
+      <rule pattern="(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)">
+        <token type="NameConstant"/>
+      </rule>
+      <rule pattern="(?:0[xX][a-zA-Z0-9]+|\d+)">
+        <token type="LiteralNumberInteger"/>
+      </rule>
+      <rule pattern="%(?:[a-zA-Z$_][\w$.@-]*|\.[\w$.@-]+)"> <!-- %register operand -->
+        <token type="NameVariable"/>
+      </rule>
+      <!-- FIX: "$" must be escaped — unescaped it is an end-of-line anchor, making the
+           immediate-operand rules unmatchable; AT&T immediates are $imm, e.g. $0x10, $'a'. -->
+      <rule pattern="\$(?:0[xX][a-zA-Z0-9]+|\d+)">
+        <token type="LiteralNumberInteger"/>
+      </rule>
+      <rule pattern="\$'(.|\\')'">
+        <token type="LiteralStringChar"/>
+      </rule>
+      <rule pattern="[\r\n]+">
+        <token type="Text"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern="([;#]|//).*?\n">
+        <token type="CommentSingle"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern="/[*].*?[*]/">
+        <token type="CommentMultiline"/>
+      </rule>
+      <rule pattern="/[*].*?\n[\w\W]*?[*]/">
+        <token type="CommentMultiline"/>
+        <pop depth="1"/>
+      </rule>
+      <rule>
+        <include state="punctuation"/>
+      </rule>
+      <rule>
+        <include state="whitespace"/>
+      </rule>
+    </state>
+    <state name="whitespace">
+      <rule pattern="\n">
+        <token type="Text"/>
+      </rule>
+      <rule pattern="\s+">
+        <token type="Text"/>
+      </rule>
+      <rule pattern="([;#]|//).*?\n">
+        <token type="CommentSingle"/>
+      </rule>
+      <rule pattern="/[*][\w\W]*?[*]/">
+        <token type="CommentMultiline"/>
+      </rule>
+    </state>
+  </rules>
+</lexer>
@@ -0,0 +1,259 @@
+<lexer>
+ <config>
+ <name>GDScript</name>
+ <alias>gdscript</alias>
+ <alias>gd</alias>
+ <filename>*.gd</filename>
+ <mime_type>text/x-gdscript</mime_type>
+ <mime_type>application/x-gdscript</mime_type>
+ <priority>0.1</priority>
+ <analyse>
+ <regex pattern="^export" score="0.1"/>
+ </analyse>
+ </config>
+ <rules>
+ <state name="dqs">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings_double"/>
+ </rule>
+ </state>
+ <state name="tdqs">
+ <rule pattern=""""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings_double"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(?<!\w)(PI|TAU|NAN|INF|true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(?<!\w)(is|in|as|not|or|and)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="(?<!\w)(var|const|enum|signal|static)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(?<!\w)(if|elif|else|for|while|match|break|continue|pass|return|breakpoint|await|yield|super)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(?<!\w)(self)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ </state>
+ <state name="builtin_funcs">
+ <rule pattern="(?<!\w)(assert|char|convert|dict_to_inst|get_stack|inst_to_dict|is_instance_of|len|load|preload|print_debug|print_stack|range|type_exists)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<!\w)(abs[fi]?|acos|asin|atan2?|bezier_(derivative|interpolate)|bytes_to_var(_with_objects)?|ceil[fi]?|clamp[fi]?|cosh?|cubic_interpolate(_angle)?(_in_time)?|db_to_linear|deg_to_rad|ease|error_string|exp|floor[fi]?|fmod|fposmod|hash|instance_from_id|inverse_lerp|is_equal_approx|is_finite|is_instance(_id)?_valid|is_nan|is_same|is_zero_approx|lerp|lerp_angle|lerpf|linear_to_db|log|max[fi]?|min[fi]?|move_toward|nearest_po2|pingpong|posmod|pow|print|print_rich|print_verbose|printerr|printraw|prints|printt|push_error|push_warning|rad_to_deg|rand_from_seed|randf|randf_range|randfn|randi|randi_range|randomize|remap|rid_allocate_id|rid_from_int64|round[fi]?|seed|sign[fi]?|sinh?|smoothstep|snapped[fi]?|sqrt|step_decimals|str|str_to_var|tanh?|typeof|var_to_bytes(_with_objects)?|var_to_str|weakref|wrap[fi]?)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ </state>
+ <state name="tsqs">
+ <rule pattern="'''">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings_single"/>
+ </rule>
+ </state>
+ <state name="strings_single">
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule pattern="\{[^\\\'\n]+\}">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="[^\\\'\{%]+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="%">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="{">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="funcname">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+[eE][+-]?[0-9]+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[a-fA-F0-9]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0b[01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="sqs">
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings_single"/>
+ </rule>
+ </state>
+ <state name="classname">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="annotations">
+ <rule pattern="^\s*@export(_category|_color_no_alpha|_dir|_enum|_exp_easing|_file|_flags((_2d|_3d)(_navigation|_physics|_render)|_avoidance)?|_global(_file|_dir)|_group|_multiline|_node_path|_placeholder|_range|_subgroup)?">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="^\s*@(icon|onready|rpc|tool|warning_ignore)">
+ <token type="NameDecorator"/>
+ </rule>
+ </state>
+ <state name="types">
+ <rule pattern="(?<!\w)(null|void|bool|int|float)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(?<!\w)(String(Name)?|NodePath|Vector[234]i?|Rect2|Transform[23]D|Plane|Quaternion|AABB|Basis|Color8?|RID|Object|(Packed(Byte|Int(32|64)|Float(32|64)|String|Vector(2|3)|Color))?Array|Dictionary|Signal|Callable)\b">
+ <token type="NameClass"/>
+ </rule>
@@ -0,0 +1,270 @@
+<lexer>
+ <config>
+ <name>GDScript3</name>
+ <alias>gdscript3</alias>
+ <alias>gd3</alias>
+ <filename>*.gd</filename>
+ <mime_type>text/x-gdscript</mime_type>
+ <mime_type>application/x-gdscript</mime_type>
+ <analyse>
+ <regex pattern="func (_ready|_init|_input|_process|_unhandled_input)" score="0.8"/>
+ <regex pattern="(extends |class_name |onready |preload|load|setget|func [^_])" score="0.4"/>
+ <regex pattern="(var|const|enum|export|signal|tool)" score="0.2"/>
+ </analyse>
+ </config>
+ <rules>
+ <state name="builtins">
+ <rule pattern="(?<!\.)(instance_from_id|nearest_po2|print_stack|type_exist|rand_range|linear2db|var2bytes|dict2inst|randomize|bytes2var|rand_seed|db2linear|inst2dict|printerr|printraw|decimals|preload|deg2rad|str2var|stepify|var2str|convert|weakref|fposmod|funcref|rad2deg|dectime|printt|is_inf|is_nan|assert|Color8|typeof|ColorN|prints|floor|atan2|yield|randf|print|range|clamp|round|randi|sqrt|tanh|cosh|ceil|ease|acos|load|fmod|lerp|seed|sign|atan|sinh|hash|asin|sin|str|cos|tan|pow|exp|min|abs|log|max)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<!\.)(self|false|true|PI|NAN|INF)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
@@ -0,0 +1,263 @@
+<lexer>
+ <config>
+ <name>Gherkin</name>
+ <alias>cucumber</alias>
+ <alias>Cucumber</alias>
+ <alias>gherkin</alias>
+ <alias>Gherkin</alias>
+ <filename>*.feature</filename>
+ <filename>*.FEATURE</filename>
+ <mime_type>text/x-gherkin</mime_type>
+ </config>
+ <rules>
+ <state name="comments">
+ <rule pattern="\s*#.*$">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="featureElementsOnStack">
@@ -0,0 +1,117 @@
+<lexer> <!-- Gleam (*.gleam) syntax definition for Chroma -->
+  <config>
+    <name>Gleam</name>
+    <alias>gleam</alias>
+    <filename>*.gleam</filename>
+    <mime_type>text/x-gleam</mime_type>
+  </config>
+  <rules>
+    <state name="root"> <!-- entry state -->
+      <rule pattern="\s+">
+        <token type="TextWhitespace"/>
+      </rule>
+      <rule pattern="///(.*?)\n"> <!-- /// doc comment (must precede // rule) -->
+        <token type="LiteralStringDoc"/>
+      </rule>
+      <rule pattern="//(.*?)\n">
+        <token type="CommentSingle"/>
+      </rule>
+      <rule pattern="(as|assert|case|opaque|panic|pub|todo)\b">
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="(import|use)\b">
+        <token type="KeywordNamespace"/>
+      </rule>
+      <rule pattern="(auto|const|delegate|derive|echo|else|if|implement|macro|test)\b"> <!-- reserved words -->
+        <token type="KeywordReserved"/>
+      </rule>
+      <rule pattern="(let)\b">
+        <token type="KeywordDeclaration"/>
+      </rule>
+      <rule pattern="(fn)\b">
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="(type)\b"> <!-- "type" is followed by a type name -->
+        <token type="Keyword"/>
+        <push state="typename"/>
+      </rule>
+      <rule pattern="(True|False)\b">
+        <token type="KeywordConstant"/>
+      </rule>
+      <rule pattern="0[bB][01](_?[01])*">
+        <token type="LiteralNumberBin"/>
+      </rule>
+      <rule pattern="0[oO][0-7](_?[0-7])*">
+        <token type="LiteralNumberOct"/>
+      </rule>
+      <rule pattern="0[xX][\da-fA-F](_?[\dA-Fa-f])*">
+        <token type="LiteralNumberHex"/>
+      </rule>
+      <rule pattern="\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?"> <!-- float, with _ digit separators -->
+        <token type="LiteralNumberFloat"/>
+      </rule>
+      <rule pattern="\d(_?\d)*">
+        <token type="LiteralNumberInteger"/>
+      </rule>
+      <rule pattern=""">
+        <token type="LiteralString"/>
+        <push state="string"/>
+      </rule>
+      <rule pattern="@([a-z_]\w*[!?]?)"> <!-- @attribute, e.g. @external -->
+        <token type="NameAttribute"/>
+      </rule>
+      <rule pattern="[{}()\[\],]|[#(]|\.\.|<>|<<|>>">
+        <token type="Punctuation"/>
+      </rule>
+      <rule pattern="[+\-*/%!=<>&|.]|<-">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern=":|->"> <!-- type annotation / return arrow introduces a type name -->
+        <token type="Operator"/>
+        <push state="typename"/>
+      </rule>
+      <rule pattern="([a-z_][A-Za-z0-9_]*)(\()"> <!-- function call -->
+        <bygroups>
+          <token type="NameFunction"/>
+          <token type="Punctuation"/>
+        </bygroups>
+      </rule>
+      <rule pattern="([A-Z][A-Za-z0-9_]*)(\()"> <!-- constructor call -->
+        <bygroups>
+          <token type="NameClass"/>
+          <token type="Punctuation"/>
+        </bygroups>
+      </rule>
+      <rule pattern="([a-z_]\w*[!?]?)">
+        <token type="Name"/>
+      </rule>
+    </state>
+    <state name="typename"> <!-- single capitalized type name, then return -->
+      <rule pattern="\s+">
+        <token type="TextWhitespace"/>
+      </rule>
+      <rule pattern="[A-Z][A-Za-z0-9_]*">
+        <token type="NameClass"/>
+        <pop depth="1"/>
+      </rule>
+      <rule>
+        <pop depth="1"/>
+      </rule>
+    </state>
+    <state name="string"> <!-- double-quoted string with \-escapes and \u{...} -->
+      <rule pattern=""">
+        <token type="LiteralString"/>
+        <pop depth="1"/>
+      </rule>
+      <rule pattern="\\["\\fnrt]|\\u\{[\da-fA-F]{1,6}\}">
+        <token type="LiteralStringEscape"/>
+      </rule>
+      <rule pattern="[^\\"]+">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern="\\">
+        <token type="LiteralString"/>
+      </rule>
+    </state>
+  </rules>
+</lexer>
@@ -0,0 +1,65 @@
+<lexer> <!-- OpenGL Shading Language (*.vert/*.frag/*.geo) definition for Chroma -->
+  <config>
+    <name>GLSL</name>
+    <alias>glsl</alias>
+    <filename>*.vert</filename>
+    <filename>*.frag</filename>
+    <filename>*.geo</filename>
+    <mime_type>text/x-glslsrc</mime_type>
+  </config>
+  <rules>
+    <state name="root">
+      <rule pattern="^#.*">
+        <token type="CommentPreproc"/>
+      </rule>
+      <rule pattern="//.*">
+        <token type="CommentSingle"/>
+      </rule>
+      <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+        <token type="CommentMultiline"/>
+      </rule>
+      <rule pattern="\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="[?:]">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="\bdefined\b">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="[;{}(),\[\]]">
+        <token type="Punctuation"/>
+      </rule>
+      <rule pattern="[+-]?\d*\.\d+([eE][-+]?\d+)?">
+        <token type="LiteralNumberFloat"/>
+      </rule>
+      <rule pattern="[+-]?\d+\.\d*([eE][-+]?\d+)?">
+        <token type="LiteralNumberFloat"/>
+      </rule>
+      <rule pattern="0[xX][0-9a-fA-F]*">
+        <token type="LiteralNumberHex"/>
+      </rule>
+      <rule pattern="0[0-7]*">
+        <token type="LiteralNumberOct"/>
+      </rule>
+      <rule pattern="[1-9][0-9]*">
+        <token type="LiteralNumberInteger"/>
+      </rule>
+      <!-- FIX: the alternation contained the fused tokens "sampler3DsamplerCube" and
+           "mat3mat4", so samplerCube, sampler3D, mat3 and mat4 never highlighted as
+           keywords; split them into separate alternatives. -->
+      <rule pattern="\b(samplerCube|sampler3D|sampler2DShadow|sampler1DShadow|invariant|sampler1D|sampler2D|attribute|mat3|mat4|centroid|continue|varying|uniform|discard|mat4x4|mat3x3|mat2x3|mat4x2|mat3x2|mat2x2|mat2x4|mat3x4|struct|return|mat4x3|bvec4|false|ivec4|ivec3|const|float|inout|ivec2|break|while|bvec3|bvec2|vec3|else|true|void|bool|vec2|vec4|mat2|for|out|int|in|do|if)\b">
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="\b(sampler2DRectShadow|sampler2DRect|sampler3DRect|namespace|precision|interface|volatile|template|unsigned|external|noinline|mediump|typedef|default|switch|static|extern|inline|sizeof|output|packed|double|public|fvec3|class|union|short|highp|fixed|input|fvec4|hvec2|hvec3|hvec4|dvec2|dvec3|dvec4|fvec2|using|long|this|enum|lowp|cast|goto|half|asm)\b"> <!-- reserved/future-use words -->
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="[a-zA-Z_]\w*">
+        <token type="Name"/>
+      </rule>
+      <rule pattern="\.">
+        <token type="Punctuation"/>
+      </rule>
+      <rule pattern="\s+">
+        <token type="Text"/>
+      </rule>
+    </state>
+  </rules>
+</lexer>
@@ -0,0 +1,289 @@
+<lexer>
+ <config>
+ <name>Gnuplot</name>
+ <alias>gnuplot</alias>
+ <filename>*.plot</filename>
+ <filename>*.plt</filename>
+ <mime_type>text/x-gnuplot</mime_type>
+ </config>
+ <rules>
+ <state name="whitespace">
+ <rule pattern="#">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="[ \t\v\f]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="save">
+ <rule pattern="functions\b|function\b|functio\b|functi\b|funct\b|func\b|fun\b|fu\b|f\b|set\b|se\b|s\b|terminal\b|termina\b|termin\b|termi\b|term\b|ter\b|te\b|t\b|variables\b|variable\b|variabl\b|variab\b|varia\b|vari\b|var\b|va\b|v\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule>
+ <include state="genericargs"/>
+ </rule>
+ </state>
+ <state name="pause">
+ <rule pattern="(mouse|any|button1|button2|button3)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="keypress\b|keypres\b|keypre\b|keypr\b|keyp\b|key\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule>
+ <include state="genericargs"/>
+ </rule>
+ </state>
+ <state name="plot">
+ <rule pattern="axes\b|axe\b|ax\b|axis\b|axi\b|binary\b|binar\b|bina\b|bin\b|every\b|ever\b|eve\b|ev\b|index\b|inde\b|ind\b|in\b|i\b|matrix\b|matri\b|matr\b|mat\b|smooth\b|smoot\b|smoo\b|smo\b|sm\b|s\b|thru\b|title\b|titl\b|tit\b|ti\b|t\b|notitle\b|notitl\b|notit\b|noti\b|not\b|using\b|usin\b|usi\b|us\b|u\b|with\b|wit\b|wi\b|w\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule>
+ <include state="genericargs"/>
+ </rule>
+ </state>
+ <state name="if">
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="genericargs"/>
+ </rule>
+ </state>
+ <state name="genericargs">
+ <rule>
+ <include state="noargs"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="dqstring"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="sqstring"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[,.~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[{}()\[\]]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(eq|ne)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="@[a-zA-Z_]\w*">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^\\\n]">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="Comment"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="bind\b|bin\b|bi\b">
+ <token type="Keyword"/>
+ <push state="bind"/>
+ </rule>
+ <rule pattern="exit\b|exi\b|ex\b|quit\b|qui\b|qu\b|q\b">
+ <token type="Keyword"/>
+ <push state="quit"/>
+ </rule>
+ <rule pattern="fit\b|fi\b|f\b">
+ <token type="Keyword"/>
+ <push state="fit"/>
+ </rule>
+ <rule pattern="(if)(\s*)(\()">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="if"/>
+ </rule>
+ <rule pattern="else\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="pause\b|paus\b|pau\b|pa\b">
+ <token type="Keyword"/>
+ <push state="pause"/>
+ </rule>
+ <rule pattern="plot\b|plo\b|pl\b|p\b|replot\b|replo\b|repl\b|rep\b|splot\b|splo\b|spl\b|sp\b">
+ <token type="Keyword"/>
+ <push state="plot"/>
+ </rule>
+ <rule pattern="save\b|sav\b|sa\b">
+ <token type="Keyword"/>
+ <push state="save"/>
+ </rule>
+ <rule pattern="set\b|se\b">
+ <token type="Keyword"/>
+ <push state="genericargs" state="optionarg"/>
+ </rule>
+ <rule pattern="show\b|sho\b|sh\b|unset\b|unse\b|uns\b">
+ <token type="Keyword"/>
+ <push state="noargs" state="optionarg"/>
+ </rule>
+ <rule pattern="lower\b|lowe\b|low\b|raise\b|rais\b|rai\b|ra\b|call\b|cal\b|ca\b|cd\b|clear\b|clea\b|cle\b|cl\b|help\b|hel\b|he\b|h\b|\?\b|history\b|histor\b|histo\b|hist\b|his\b|hi\b|load\b|loa\b|lo\b|l\b|print\b|prin\b|pri\b|pr\b|pwd\b|reread\b|rerea\b|rere\b|rer\b|re\b|reset\b|rese\b|res\b|screendump\b|screendum\b|screendu\b|screend\b|screen\b|scree\b|scre\b|scr\b|shell\b|shel\b|she\b|system\b|syste\b|syst\b|sys\b|sy\b|update\b|updat\b|upda\b|upd\b|up\b">
+ <token type="Keyword"/>
+ <push state="genericargs"/>
+ </rule>
+ <rule pattern="pwd\b|reread\b|rerea\b|rere\b|rer\b|re\b|reset\b|rese\b|res\b|screendump\b|screendum\b|screendu\b|screend\b|screen\b|scree\b|scre\b|scr\b|shell\b|shel\b|she\b|test\b">
+ <token type="Keyword"/>
+ <push state="noargs"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(=)">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ <push state="genericargs"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ <push state="genericargs"/>
+ </rule>
+ <rule pattern="@[a-zA-Z_]\w*">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="dqstring">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="optionarg">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
@@ -0,0 +1,114 @@
+<lexer>
+ <config>
+ <name>Go Template</name>
+ <alias>go-template</alias>
+ <filename>*.gotmpl</filename>
+ <filename>*.go.tmpl</filename>
+ </config>
+ <rules>
+ <state name="template">
+ <rule pattern="[-]?}}">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=}})">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Operator"/>
+ <push state="subexpression"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="expression"/>
+ </rule>
+ </state>
+ <state name="subexpression">
+ <rule pattern="\)">
+ <token type="Operator"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="expression"/>
+ </rule>
+ </state>
+ <state name="expression">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Operator"/>
+ <push state="subexpression"/>
+ </rule>
+ <rule pattern="(range|if|else|while|with|template|end|true|false|nil|and|call|html|index|js|len|not|or|print|printf|println|urlquery|eq|ne|lt|le|gt|ge|block|break|continue|define|slice)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\||:?=|,">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[$]?[^\W\d]\w*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern="\$|[$]?\.(?:[^\W\d]\w*)?">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="-?\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="-?\d+\.\d*([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\.\d+([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="-?\d+[Ee][-+]\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="-?\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?\.\d+([eE][+\-]?\d+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?0[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="-?0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="-?0b[01_]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="-?(0|[1-9][0-9]*)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="`[^`]*`">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="{{(- )?/\*(.|\n)*?\*/( -)?}}">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="{{[-]?">
+ <token type="CommentPreproc"/>
+ <push state="template"/>
+ </rule>
+ <rule pattern="[^{]+">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="{">
+ <token type="Other"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,88 @@
+<lexer>
+ <config>
+ <name>GraphQL</name>
+ <alias>graphql</alias>
+ <alias>graphqls</alias>
+ <alias>gql</alias>
+ <filename>*.graphql</filename>
+ <filename>*.graphqls</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(query|mutation|subscription|fragment|scalar|implements|interface|union|enum|input|type)">
+ <token type="KeywordDeclaration"/>
+ <push state="type"/>
+ </rule>
+ <rule pattern="(on|extend|schema|directive|\.\.\.)">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(QUERY|MUTATION|SUBSCRIPTION|FIELD|FRAGMENT_DEFINITION|FRAGMENT_SPREAD|INLINE_FRAGMENT|SCHEMA|SCALAR|OBJECT|FIELD_DEFINITION|ARGUMENT_DEFINITION|INTERFACE|UNION|ENUM|ENUM_VALUE|INPUT_OBJECT|INPUT_FIELD_DEFINITION)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="[^\W\d]\w*">
+ <token type="NameProperty"/>
+ </rule>
+ <rule pattern="\@\w+">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ <push state="type"/>
+ </rule>
+ <rule pattern="[\(\)\{\}\[\],!\|=]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\$\w+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+\.\d*([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\.\d+([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+[Ee][-+]\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\.\d+([eE][+\-]?\d+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(0|[1-9][0-9]*)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""""[\x00-\x7F]*?"""">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""(\\["\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])"">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""(true|false|null)*"">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="[\r\n\s]+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="#[^\r\n]*">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="type">
+ <rule pattern="[^\W\d]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,90 @@
+<lexer>
+ <config>
+ <name>Groff</name>
+ <alias>groff</alias>
+ <alias>nroff</alias>
+ <alias>man</alias>
+ <filename>*.[1-9]</filename>
+ <filename>*.1p</filename>
+ <filename>*.3pm</filename>
+ <filename>*.man</filename>
+ <mime_type>application/x-troff</mime_type>
+ <mime_type>text/troff</mime_type>
+ </config>
+ <rules>
+ <state name="request">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern=""[^\n"]+"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="(\.)(\w+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <push state="request"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="Punctuation"/>
+ <push state="request"/>
+ </rule>
+ <rule pattern="[^\\\n]+">
+ <token type="Text"/>
+ <push state="textline"/>
+ </rule>
+ <rule>
+ <push state="textline"/>
+ </rule>
+ </state>
+ <state name="textline">
+ <rule>
+ <include state="escapes"/>
+ </rule>
+ <rule pattern="[^\\\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="escapes">
+ <rule pattern="\\"[^\n]*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\\[fn]\w">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\\(.{2}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\.\[.*\]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ <push state="request"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,135 @@
+<lexer>
+ <config>
+ <name>Groovy</name>
+ <alias>groovy</alias>
+ <filename>*.groovy</filename>
+ <filename>*.gradle</filename>
+ <mime_type>text/x-groovy</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="#!(.*?)$">
+ <token type="CommentPreproc"/>
+ <push state="base"/>
+ </rule>
+ <rule>
+ <push state="base"/>
+ </rule>
+ </state>
+ <state name="base">
+ <rule pattern="^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="@[a-zA-Z_][\w.]*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="(as|assert|break|case|catch|continue|default|do|else|finally|for|if|in|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(abstract|const|extends|final|implements|native|private|protected|public|static|strictfp|super|synchronized|throws|transient|volatile)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(def|var|boolean|byte|char|double|float|int|long|short|void)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(package)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(class|interface|enum|trait|record)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="(import)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern="""".*?"""">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'''.*?'''">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="\$/((?!/\$).)*/\$">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="/(\\\\|\\"|[^/])*/">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="(\.)([a-zA-Z_]\w*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameAttribute"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="[a-zA-Z_$]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[~^*!%&\[\](){}<>|+=:;,./?-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+L?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="class">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="[\w.]+\*?">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,147 @@
+<lexer>
+ <config>
+ <name>Handlebars</name>
+ <alias>handlebars</alias>
+ <alias>hbs</alias>
+ <filename>*.handlebars</filename>
+ <filename>*.hbs</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="[^{]+">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="\{\{!.*\}\}">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(\{\{\{)(\s*)">
+ <bygroups>
+ <token type="CommentSpecial"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="tag"/>
+ </rule>
+ <rule pattern="(\{\{)(\s*)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="tag"/>
+ </rule>
+ </state>
+ <state name="tag">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\}\}\}">
+ <token type="CommentSpecial"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\}\}">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([#/]*)(each|if|unless|else|with|log|in(?:line)?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="#\*inline">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="([#/])([\w-]+)">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([\w-]+)(=)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(>)(\s*)(@partial-block)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(#?>)(\s*)([\w-]+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(>)(\s*)(\()">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="dynamic-partial"/>
+ </rule>
+ <rule>
+ <include state="generic"/>
+ </rule>
+ </state>
+ <state name="dynamic-partial">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(lookup)(\s+)(\.|this)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(lookup)(\s+)(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <usingself state="variable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[\w-]+">
+ <token type="NameFunction"/>
+ </rule>
+ <rule>
+ <include state="generic"/>
+ </rule>
+ </state>
+ <state name="variable">
+ <rule pattern="[a-zA-Z][\w-]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\.[\w-]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(this\/|\.\/|(\.\.\/)+)[\w-]+">
+ <token type="NameVariable"/>
+ </rule>
+ </state>
+ <state name="generic">
+ <rule>
+ <include state="variable"/>
+ </rule>
+ <rule pattern=":?"(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=":?'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="[0-9](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?">
+ <token type="LiteralNumber"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,98 @@
+<lexer>
+ <config>
+ <name>Hare</name>
+ <alias>hare</alias>
+ <filename>*.ha</filename>
+ <mime_type>text/x-hare</mime_type>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\0abfnrtv"']|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[\s\n]+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="@[a-z]+">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="//.*\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="`[^`]*`">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'(\\[\\0abfnrtv"']|\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8})|[^\\'])'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="(0|[1-9]\d*)\.\d+([eE][+-]?\d+)?(f32|f64)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(0|[1-9]\d*)([eE][+-]?\d+)?(f32|f64)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][+-]?\d+(f32|f64)?)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+[pP][+-]?\d+(f32|f64)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+(z|[iu](8|16|32|64)?)?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0o[0-7]+(z|[iu](8|16|32|64)?)?">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0b[01]+(z|[iu](8|16|32|64)?)?">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="(0|[1-9]\d*)([eE][+-]?\d+)?(z|[iu](8|16|32|64)?)?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]|[ai]s\b|\.\.\.">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[()\[\],.{};]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="use\b">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern="(_|align|break|const|continue|else|enum|export|for|if|return|static|struct|offset|union|fn|free|assert|abort|alloc|let|len|def|type|match|switch|case|append|delete|insert|defer|yield|vastart|vaarg|vaend)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(size)([\s\n]*)(\()">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="TextWhitespace" />
+ <token type="Punctuation" />
+ </bygroups>
+ </rule>
+ <rule pattern="(str|size|rune|bool|int|uint|uintptr|u8|u16|u32|u64|i8|i16|i32|i64|f32|f64|null|void|done|nullable|valist|opaque|never)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(true|false)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,275 @@
+<lexer>
+ <config>
+ <name>Haskell</name>
+ <alias>haskell</alias>
+ <alias>hs</alias>
+ <filename>*.hs</filename>
+ <mime_type>text/x-haskell</mime_type>
+ </config>
+ <rules>
+ <state name="escape">
+ <rule pattern="[abfnrtv"\'&\\]">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\^[][\p{Lu}@^_]">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="NUL|SOH|[SE]TX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|S[OI]|DLE|DC[1-4]|NAK|SYN|ETB|CAN|EM|SUB|ESC|[FGRU]S|SP|DEL">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="o[0-7]+">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="x[\da-fA-F]+">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s+\\">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="--(?![!#$%&*+./<=>?@^|_~:\\]).*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\{-">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="\bimport\b">
+ <token type="KeywordReserved"/>
+ <push state="import"/>
+ </rule>
+ <rule pattern="\bmodule\b">
+ <token type="KeywordReserved"/>
+ <push state="module"/>
+ </rule>
+ <rule pattern="\berror\b">
+ <token type="NameException"/>
+ </rule>
+ <rule pattern="\b(case|class|data|default|deriving|do|else|family|if|in|infix[lr]?|instance|let|newtype|of|then|type|where|_)(?!\')\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="'[^\\]'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="^[_\p{Ll}][\w\']*">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="'?[_\p{Ll}][\w']*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="('')?[\p{Lu}][\w\']*">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(')[\p{Lu}][\w\']*">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(')\[[^\]]*\]">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(')\([^)]*\)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\\(?![:!#$%&*+.\\/<=>?@^|~-]+)">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(<-|::|->|=>|=|'([:!#$%&*+.\\/<=>?@^|~-]+))(?![:!#$%&*+.\\/<=>?@^|~-]+)">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern=":[:!#$%&*+.\\/<=>?@^|~-]*">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="[:!#$%&*+.\\/<=>?@^|~-]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\d+_*[eE][+-]?\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+(_+[\d]+)*\.\d+(_+[\d]+)*([eE][+-]?\d+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[oO](_*[0-7])+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[xX](_*[\da-fA-F])+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[bB](_*[01])+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="\d+(_*[\d])*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringChar"/>
+ <push state="character"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="\[\]">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\(\)">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[][(),;`{}]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="qualified\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="([\p{Lu}][\w.]*)(\s+)(as)(\s+)([\p{Lu}][\w.]*)">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Name"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([\p{Lu}][\w.]*)(\s+)(hiding)(\s+)(\()">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="funclist"/>
+ </rule>
+ <rule pattern="([\p{Lu}][\w.]*)(\s+)(\()">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="funclist"/>
+ </rule>
+ <rule pattern="[\w.]+">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="module">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="([\p{Lu}][\w.]*)(\s+)(\()">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="funclist"/>
+ </rule>
+ <rule pattern="[\p{Lu}][\w.]*">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="funclist">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[\p{Lu}]\w*">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(_[\w\']+|[\p{Ll}][\w\']*)">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="--(?![!#$%&*+./<=>?@^|_~:\\]).*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\{-">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[:!#$%&*+.\\/<=>?@^|~-]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="funclist" state="funclist"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="2"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^-{}]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\{-">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="-\}">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[-{}]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="character">
+ <rule pattern="[^\\']'">
+ <token type="LiteralStringChar"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralStringEscape"/>
+ <push state="escape"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringChar"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="[^\\"]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralStringEscape"/>
+ <push state="escape"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,143 @@
+<lexer>
+ <config>
+ <name>HCL</name>
+ <alias>hcl</alias>
+ <filename>*.hcl</filename>
+ <mime_type>application/x-hcl</mime_type>
+ </config>
+ <rules>
+ <state name="punctuation">
+ <rule pattern="[\[\](),.]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="(".*")">
+ <bygroups>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="string"/>
+ </rule>
+ <rule>
+ <include state="punctuation"/>
+ </rule>
+ <rule>
+ <include state="curly"/>
+ </rule>
+ <rule>
+ <include state="basic"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumber"/>
+ </rule>
+ </state>
+ <state name="basic">
+ <rule pattern="\b(false|true)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\s*/\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="\s*#.*\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(.*?)(\s*)(=)">
+ <bygroups>
+ <token type="Name"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\b\w+\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="var_builtin"/>
+ </rule>
+ </state>
+ <state name="curly">
+ <rule pattern="\{">
+ <token type="TextPunctuation"/>
+ </rule>
+ <rule pattern="\}">
+ <token type="TextPunctuation"/>
+ </rule>
+ </state>
+ <state name="function">
+ <rule pattern="(\s+)(".*")(\s+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="punctuation"/>
+ </rule>
+ <rule>
+ <include state="curly"/>
+ </rule>
+ </state>
+ <state name="var_builtin">
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push/>
+ </rule>
+ <rule pattern="\b(element|concat|lookup|file|join)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule>
+ <include state="string"/>
+ </rule>
+ <rule>
+ <include state="punctuation"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,189 @@
+<lexer>
+ <config>
+ <name>Hexdump</name>
+ <alias>hexdump</alias>
+ </config>
+ <rules>
+ <state name="offset">
+ <rule pattern="^([0-9A-Ha-h]+)(:)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="offset-mode"/>
+ </rule>
+ <rule pattern="^[0-9A-Ha-h]+">
+ <token type="NameLabel"/>
+ </rule>
+ </state>
+ <state name="offset-mode">
+ <rule pattern="\s">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[0-9A-Ha-h]+">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="piped-strings">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule>
+ <include state="offset"/>
+ </rule>
+ <rule pattern="[0-9A-Ha-h]{2}">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(\s{2,3})(\|)(.{1,16})(\|)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\s">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="^\*">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="bracket-strings">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule>
+ <include state="offset"/>
+ </rule>
+ <rule pattern="[0-9A-Ha-h]{2}">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(\s{2,3})(\>)(.{1,16})(\<)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\s">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="^\*">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="nonpiped-strings">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule>
+ <include state="offset"/>
+ </rule>
+ <rule pattern="([0-9A-Ha-h]{2})(\-)([0-9A-Ha-h]{2})">
+ <bygroups>
+ <token type="LiteralNumberHex"/>
+ <token type="Punctuation"/>
+ <token type="LiteralNumberHex"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[0-9A-Ha-h]{2}">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(\s{19,})(.{1,20}?)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s{2,3})(.{1,20})$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\s">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="^\*">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule>
+ <include state="offset"/>
+ </rule>
+ <rule pattern="([0-9A-Ha-h]{2})(\-)([0-9A-Ha-h]{2})">
+ <bygroups>
+ <token type="LiteralNumberHex"/>
+ <token type="Punctuation"/>
+ <token type="LiteralNumberHex"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[0-9A-Ha-h]{2}">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(\s{2,3})(\>)(.{16})(\<)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="bracket-strings"/>
+ </rule>
+ <rule pattern="(\s{2,3})(\|)(.{16})(\|)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="piped-strings"/>
+ </rule>
+ <rule pattern="(\s{2,3})(\>)(.{1,15})(\<)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s{2,3})(\|)(.{1,15})(\|)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s{2,3})(.{1,15})$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s{2,3})(.{16}|.{20})$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="nonpiped-strings"/>
+ </rule>
+ <rule pattern="\s">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="^\*">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,149 @@
+<lexer>
+ <config>
+ <name>HLB</name>
+ <alias>hlb</alias>
+ <filename>*.hlb</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(#.*)">
+ <bygroups>
+ <token type="CommentSingle"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))">
+ <bygroups>
+ <token type="LiteralNumber"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((\b(true|false)\b))">
+ <bygroups>
+ <token type="NameBuiltin"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)">
+ <bygroups>
+ <token type="KeywordType"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\b[a-zA-Z_][a-zA-Z0-9]*\b)(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="params"/>
+ </rule>
+ <rule pattern="(\{)">
+ <bygroups>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="block"/>
+ </rule>
+ <rule pattern="(\n|\r|\r\n)">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[^\\"]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="block">
+ <rule pattern="(\})">
+ <bygroups>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(#.*)">
+ <bygroups>
+ <token type="CommentSingle"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))">
+ <bygroups>
+ <token type="LiteralNumber"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((\b(true|false)\b))">
+ <bygroups>
+ <token type="KeywordConstant"/>
+ </bygroups>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(with)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(as)([\t ]+)(\b[a-zA-Z_][a-zA-Z0-9]*\b)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)([\t ]+)(\{)">
+ <bygroups>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="block"/>
+ </rule>
+ <rule pattern="(?!\b(?:scratch|image|resolve|http|checksum|chmod|filename|git|keepGitDir|local|includePatterns|excludePatterns|followPaths|generate|frontendInput|shell|run|readonlyRootfs|env|dir|user|network|security|host|ssh|secret|mount|target|localPath|uid|gid|mode|readonly|tmpfs|sourcePath|cache|mkdir|createParents|chown|createdTime|mkfile|rm|allowNotFound|allowWildcards|copy|followSymlinks|contentsOnly|unpack|createDestPath)\b)(\b[a-zA-Z_][a-zA-Z0-9]*\b)">
+ <bygroups>
+ <token type="NameOther"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\n|\r|\r\n)">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="params">
+ <rule pattern="(\))">
+ <bygroups>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(variadic)">
+ <bygroups>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)">
+ <bygroups>
+ <token type="KeywordType"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\b[a-zA-Z_][a-zA-Z0-9]*\b)">
+ <bygroups>
+ <token type="NameOther"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\n|\r|\r\n)">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,110 @@
+<lexer>
+ <config>
+ <name>HLSL</name>
+ <alias>hlsl</alias>
+ <filename>*.hlsl</filename>
+ <filename>*.hlsli</filename>
+ <filename>*.cginc</filename>
+ <filename>*.fx</filename>
+ <filename>*.fxh</filename>
+ <mime_type>text/x-hlsl</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^#.*$">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="//.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[?:]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\bdefined\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[;{}(),.\[\]]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[+-]?\d*\.\d+([eE][-+]?\d+)?f?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[+-]?\d+\.\d*([eE][-+]?\d+)?f?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[0-7]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="[1-9][0-9]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="\b(asm|asm_fragment|break|case|cbuffer|centroid|class|column_major|compile|compile_fragment|const|continue|default|discard|do|else|export|extern|for|fxgroup|globallycoherent|groupshared|if|in|inline|inout|interface|line|lineadj|linear|namespace|nointerpolation|noperspective|NULL|out|packoffset|pass|pixelfragment|point|precise|return|register|row_major|sample|sampler|shared|stateblock|stateblock_state|static|struct|switch|tbuffer|technique|technique10|technique11|texture|typedef|triangle|triangleadj|uniform|vertexfragment|volatile|while)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="\b(auto|catch|char|const_cast|delete|dynamic_cast|enum|explicit|friend|goto|long|mutable|new|operator|private|protected|public|reinterpret_cast|short|signed|sizeof|static_cast|template|this|throw|try|typename|union|unsigned|using|virtual)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="\b(dword|matrix|snorm|string|unorm|unsigned|void|vector|BlendState|Buffer|ByteAddressBuffer|ComputeShader|DepthStencilState|DepthStencilView|DomainShader|GeometryShader|HullShader|InputPatch|LineStream|OutputPatch|PixelShader|PointStream|RasterizerState|RenderTargetView|RasterizerOrderedBuffer|RasterizerOrderedByteAddressBuffer|RasterizerOrderedStructuredBuffer|RasterizerOrderedTexture1D|RasterizerOrderedTexture1DArray|RasterizerOrderedTexture2D|RasterizerOrderedTexture2DArray|RasterizerOrderedTexture3D|RWBuffer|RWByteAddressBuffer|RWStructuredBuffer|RWTexture1D|RWTexture1DArray|RWTexture2D|RWTexture2DArray|RWTexture3D|SamplerState|SamplerComparisonState|StructuredBuffer|Texture1D|Texture1DArray|Texture2D|Texture2DArray|Texture2DMS|Texture2DMSArray|Texture3D|TextureCube|TextureCubeArray|TriangleStream|VertexShader)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\b(bool|double|float|int|half|min16float|min10float|min16int|min12int|min16uint|uint)([1-4](x[1-4])?)?\b">
+ <token type="KeywordType"/>
+ </rule>
@@ -0,0 +1,252 @@
+<lexer>
+ <config>
+ <name>HolyC</name>
+ <alias>holyc</alias>
+ <filename>*.HC</filename>
+ <filename>*.hc</filename>
+ <filename>*.HH</filename>
+ <filename>*.hh</filename>
+ <filename>*.hc.z</filename>
+ <filename>*.HC.Z</filename>
+ <mime_type>text/x-chdr</mime_type>
+ <mime_type>text/x-csrc</mime_type>
+ <mime_type>image/x-xbitmap</mime_type>
+ <mime_type>image/x-xpixmap</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="statement">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="function">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="macro">
+ <rule pattern="(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="CommentPreprocFile"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^/\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="/[*](.|\n)*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(?<=\\)\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="if0">
+ <rule pattern="^\s*#if.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <push/>
+ </rule>
+ <rule pattern="^\s*#el(?:se|if).*\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="^\s*#endif.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".*?\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="^#if\s+0">
+ <token type="CommentPreproc"/>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^#">
+ <token type="CommentPreproc"/>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(\n|[\w\W]*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="statements">
+ <rule pattern="(L?)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+[LlUu]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[0-7]+[LlUu]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="\d+[LlUu]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[()\[\],.]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(break|case|continue|default|do|else|for|goto|if|return|switch|while|throw|try|catch|extern|MOV|CALL|PUSH|LEAVE|RET|SUB|SHR|ADD|RETF|CMP|JNE|BTS|INT|XOR|JC|JZ|LOOP|POP|TEST|SHL|ADC|SBB|JMP|INC)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(U0|I8|U8|I16|U16|I32|U32|I64|U64|F64|Bool|class|union|DU8|DU16|DU32|DU64|RAX|RCX|RDX|RBX|RSP|RBP|RSI|RDI|EAX|ECX|EDX|EBX|ESP|EBP|ESI|EDI|AX|CX|DX|BX|SP|BP|SI|DI|SS|CS|DS|ES|FS|GS|CH|asm|const|extern|register|restrict|static|volatile|inline|_extern|_import|IMPORT|public)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="__()\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(NULL|TRUE|FALSE|ON|OFF)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(:)(?!:)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b[A-Za-z_]\w*(?=\s*\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="function"/>
+ </rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <push state="statement"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,159 @@
+<lexer>
+ <config>
+ <name>HTML</name>
+ <alias>html</alias>
+ <filename>*.html</filename>
+ <filename>*.htm</filename>
+ <filename>*.xhtml</filename>
+ <filename>*.xslt</filename>
+ <mime_type>text/html</mime_type>
+ <mime_type>application/xhtml+xml</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <dot_all>true</dot_all>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="script-content">
+ <rule pattern="(<)(\s*)(/)(\s*)(script)(\s*)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".+?(?=<\s*/\s*script\s*>)">
+ <using lexer="Javascript"/>
+ </rule>
+ </state>
+ <state name="style-content">
+ <rule pattern="(<)(\s*)(/)(\s*)(style)(\s*)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".+?(?=<\s*/\s*style\s*>)">
+ <using lexer="CSS"/>
+ </rule>
+ </state>
+ <state name="attr">
+ <rule pattern="".*?"">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="'.*?'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\s>]+">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[^<&]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="&\S*?;">
+ <token type="NameEntity"/>
+ </rule>
+ <rule pattern="\<\!\[CDATA\[.*?\]\]\>">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="<!--">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="<\?.*?\?>">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="<![^>]*>">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(<)(\s*)(script)(\s*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="script-content" state="tag"/>
+ </rule>
+ <rule pattern="(<)(\s*)(style)(\s*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="style-content" state="tag"/>
+ </rule>
+ <rule pattern="(<)(\s*)([\w:.-]+)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ </bygroups>
+ <push state="tag"/>
+ </rule>
+ <rule pattern="(<)(\s*)(/)(\s*)([\w:.-]+)(\s*)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^-]+">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="-->">
+ <token type="Comment"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="-">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="tag">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="([\w:-]+\s*)(=)(\s*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="attr"/>
+ </rule>
+ <rule pattern="[\w:-]+">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(/?)(\s*)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,104 @@
+<lexer>
+ <config>
+ <name>Hy</name>
+ <alias>hylang</alias>
+ <filename>*.hy</filename>
+ <mime_type>text/x-hy</mime_type>
+ <mime_type>application/x-hy</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern=";.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="[,\s]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="-?\d+\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="0[0-7]+j?">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[xX][a-fA-F0-9]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'(?!#)[\w!$%*+<=>?/.#-]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="\\(.|[a-z]+)">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringDoc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringDoc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="::?(?!#)[\w!$%*+<=>?/.#-]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="~@|[`\'#^~&@]">
+ <token type="Operator"/>
+ </rule>
+ <rule>
+ <include state="py-keywords"/>
+ </rule>
+ <rule>
+ <include state="py-builtins"/>
+ </rule>
+ <rule pattern="(eval-when-compile|eval-and-compile|with-decorator|unquote-splice|quasiquote|list_comp|unquote|foreach|kwapply|import|not-in|unless|is-not|quote|progn|slice|assoc|first|while|when|rest|cond|<<=|->>|for|get|>>=|let|cdr|car|is|->|do|in|\||~|,) ">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(defmacro|defclass|lambda|defun|defn|setv|def|fn) ">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(repeatedly|take_while|iterator\?|iterable\?|instance\?|distinct|take_nth|numeric\?|iterate|filter|repeat|remove|even\?|none\?|cycle|zero\?|odd\?|pos\?|neg\?|take|drop|inc|dec|nth) ">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<=\()(?!#)[\w!$%*+<=>?/.#-]+">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(?!#)[\w!$%*+<=>?/.#-]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(\[|\])">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(\{|\})">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(\(|\))">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="py-keywords">
+ <rule pattern="(yield from|continue|finally|lambda|assert|global|except|return|print|yield|while|break|raise|elif|pass|exec|else|with|try|for|del|as|if)\b">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="py-builtins">
+ <rule pattern="(?<!\.)(staticmethod|classmethod|__import__|isinstance|basestring|issubclass|frozenset|raw_input|bytearray|enumerate|property|callable|reversed|execfile|hasattr|setattr|compile|complex|delattr|unicode|globals|getattr|unichr|reduce|xrange|buffer|intern|filter|locals|divmod|coerce|sorted|reload|object|slice|round|float|super|input|bytes|apply|tuple|range|iter|dict|long|type|hash|vars|next|file|exit|open|repr|eval|bool|list|bin|pow|zip|ord|oct|min|set|any|max|map|all|len|sum|int|dir|hex|chr|abs|cmp|str|id)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="(?<!\.)(PendingDeprecationWarning|UnicodeTranslateError|NotImplementedError|UnicodeEncodeError|UnicodeDecodeError|DeprecationWarning|FloatingPointError|UnboundLocalError|KeyboardInterrupt|ZeroDivisionError|EnvironmentError|IndentationError|ArithmeticError|OverflowWarning|ReferenceError|RuntimeWarning|AttributeError|AssertionError|NotImplemented|UnicodeWarning|FutureWarning|BaseException|StopIteration|SyntaxWarning|OverflowError|StandardError|ImportWarning|GeneratorExit|RuntimeError|WindowsError|UnicodeError|LookupError|SyntaxError|SystemError|ImportError|MemoryError|UserWarning|ValueError|IndexError|SystemExit|Exception|TypeError|NameError|EOFError|VMSError|KeyError|TabError|IOError|OSError|Warning)\b">
+ <token type="NameException"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,216 @@
+<lexer>
+ <config>
+ <name>Idris</name>
+ <alias>idris</alias>
+ <alias>idr</alias>
+ <filename>*.idr</filename>
+ <mime_type>text/x-idris</mime_type>
+ </config>
+ <rules>
+ <state name="escape">
+ <rule pattern="[abfnrtv"\'&\\]">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\^[][A-Z@^_]">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="NUL|SOH|[SE]TX|EOT|ENQ|ACK|BEL|BS|HT|LF|VT|FF|CR|S[OI]|DLE|DC[1-4]|NAK|SYN|ETB|CAN|EM|SUB|ESC|[FGRU]S|SP|DEL">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="o[0-7]+">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="x[\da-fA-F]+">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s+\\">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^(\s*)(%lib|link|flag|include|hide|freeze|access|default|logging|dynamic|name|error_handlers|language)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="KeywordReserved"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(--(?![!#$%&*+./<=>?@^|_~:\\]).*?)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentSingle"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(\|{3}.*?)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentSingle"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(\{-)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentMultiline"/>
+ </bygroups>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="^(\s*)([^\s(){}]+)(\s*)(:)(\s*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="OperatorWord"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(case|class|data|default|using|do|else|if|in|infix[lr]?|instance|rewrite|auto|namespace|codata|mutual|private|public|abstract|total|partial|let|proof|of|then|static|where|_|with|pattern|term|syntax|prefix|postulate|parameters|record|dsl|impossible|implicit|tactics|intros|intro|compute|refine|exact|trivial)(?!\')\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(import|module)(\s+)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="module"/>
+ </rule>
+ <rule pattern="('')?[A-Z][\w\']*">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="[a-z][\w\']*">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(<-|::|->|=>|=)">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="([(){}\[\]:!#$%&*+.\\/<=>?@^|~-]+)">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="\d+[eE][+-]?\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+\.\d+([eE][+-]?\d+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[xX][\da-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringChar"/>
+ <push state="character"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="[^\s(){}]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+?">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="module">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="([A-Z][\w.]*)(\s+)(\()">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="funclist"/>
+ </rule>
+ <rule pattern="[A-Z][\w.]*">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="funclist">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[A-Z]\w*">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(_[\w\']+|[a-z][\w\']*)">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="--.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\{-">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[:!#$%&*+.\\/<=>?@^|~-]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="funclist" state="funclist"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="2"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^-{}]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\{-">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="-\}">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[-{}]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="character">
+ <rule pattern="[^\\']">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralStringEscape"/>
+ <push state="escape"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringChar"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="[^\\"]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralStringEscape"/>
+ <push state="escape"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,47 @@
+<lexer>
+ <config>
+ <name>Igor</name>
+ <alias>igor</alias>
+ <alias>igorpro</alias>
+ <filename>*.ipf</filename>
+ <mime_type>text/ipf</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="//.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern=""([^"\\]|\\.)*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\b(AbortOnValue|AbortOnRTE|strswitch|endswitch|continue|default|endfor|endtry|switch|return|elseif|while|catch|endif|break|else|case|for|try|do|if)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(strconstant|constant|variable|funcref|string|uint64|uint32|uint16|STRUCT|double|dfref|uchar|int16|int32|int64|float|WAVE|SVAR|NVAR|char)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\b(EndStructure|MultiThread|ThreadSafe|Structure|EndMacro|function|DoPrompt|override|Picture|SubMenu|window|Prompt|static|macro|Proc|Menu|end)\b">
+ <token type="KeywordReserved"/>
+ </rule>
@@ -0,0 +1,45 @@
+<lexer>
+ <config>
+ <name>INI</name>
+ <alias>ini</alias>
+ <alias>cfg</alias>
+ <alias>dosini</alias>
+ <filename>*.ini</filename>
+ <filename>*.cfg</filename>
+ <filename>*.inf</filename>
+ <filename>*.service</filename>
+ <filename>*.socket</filename>
+ <filename>.gitconfig</filename>
+ <filename>.editorconfig</filename>
+ <filename>pylintrc</filename>
+ <filename>.pylintrc</filename>
+ <mime_type>text/x-ini</mime_type>
+ <mime_type>text/inf</mime_type>
+ <priority>0.1</priority> <!-- higher priority than Inform 6 -->
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[;#].*">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\[.*?\]$">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(.+?)$">
+ <token type="NameAttribute"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,71 @@
+<lexer>
+ <config>
+ <name>Io</name>
+ <alias>io</alias>
+ <filename>*.io</filename>
+ <mime_type>text/x-iosrc</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(.*?)\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="#(.*?)\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\+">
+ <token type="CommentMultiline"/>
+ <push state="nestedcomment"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(clone|do|doFile|doString|method|for|if|else|elseif|then)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(nil|false|true)\b">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="(Object|list|List|Map|args|Sequence|Coroutine|File)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="nestedcomment">
+ <rule pattern="[^+/]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\+">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\+/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[+/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,96 @@
+<lexer>
+ <config>
+ <name>ISCdhcpd</name>
+ <alias>iscdhcpd</alias>
+ <filename>dhcpd.conf</filename>
+ </config>
+ <rules>
+ <state name="interpol">
+ <rule pattern="\$[{(]">
+ <token type="LiteralStringInterpol"/>
+ <push/>
+ </rule>
+ <rule pattern="[})]">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^${()}]+">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="#.*?\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(hardware|packet|leased-address|host-decl-name|lease-time|max-lease-time|client-state|config-option|option|filename|next-server|allow|deny|match|ignore)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(include|group|host|subnet|subnet6|netmask|class|subclass|pool|failover|include|shared-network|range|range6|prefix6)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(on|off|true|false|none)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(if|elsif|else)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(exists|known|static)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(and|or|not)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="(==|!=|~=|~~|=)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[{},;\)]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\/\d{1,2}">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[a-fA-F0-9]{1,2}:[a-fA-F0-9]{1,2}:[a-fA-F0-9]{1,2}:[a-fA-F0-9]{1,2}:[a-fA-F0-9]{1,2}:[a-fA-F0-9]{1,2}">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="doublequotestring"/>
+ </rule>
+ <rule pattern="([\w\-.]+)(\s*)(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[\w\-.]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="doublequotestring">
+ <rule pattern="\$[{(]">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpol"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,157 @@
+<lexer>
+ <config>
+ <name>J</name>
+ <alias>j</alias>
+ <filename>*.ijs</filename>
+ <mime_type>text/x-j</mime_type>
+ </config>
+ <rules>
+ <state name="singlequote">
+ <rule pattern="[^']">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="''">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="#!.*$">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="NB\..*">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\n+\s*Note">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="\s*Note.*">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="singlequote"/>
+ </rule>
+ <rule pattern="0\s+:\s*0|noun\s+define\s*$">
+ <token type="NameEntity"/>
+ <push state="nounDefinition"/>
+ </rule>
+ <rule pattern="(([1-4]|13)\s+:\s*0|(adverb|conjunction|dyad|monad|verb)\s+define)\b">
+ <token type="NameFunction"/>
+ <push state="explicitDefinition"/>
+ </rule>
+ <rule pattern="(label_|goto_|for_)\b[a-zA-Z]\w*\.">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="(continue|select|return|assert|catchd|catcht|elseif|whilst|break|catch|fcase|while|throw|else|case|end|try|for|do|if)\.">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="\b[a-zA-Z]\w*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(timespacex|fixdotdot|nameclass|namelist|file2url|tmoutput|ucpcount|boxxopen|smoutput|JVERSION|datatype|toupper|tolower|alpha17|alpha27|getargs|evtloop|boxopen|fliprgb|inverse|scriptd|iospath|cutopen|isatty|toCRLF|toHOST|isutf8|getenv|stdout|script|usleep|sminfo|expand|stderr|clear|fetch|every|erase|empty|Debug|EMPTY|split|names|timex|cutLF|stdin|apply|items|table|exit|Note|list|take|leaf|type|bind|drop|rows|each|echo|sign|CRLF|utf8|sort|pick|ARGV|uucp|ucp|DEL|inv|hfd|dfh|def|LF2|EAV|toJ|TAB|nl|FF|LF|bx|nc|CR|on)">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="=[.:]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[-=+*#$%@!~`^&";:.,<>{}\[\]\\|/]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[abCdDeEfHiIjLMoprtT]\.">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="[aDiLpqsStux]\:">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(_[0-9])\:">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="parentheses"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^)]">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="^\)">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[)]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="explicitDefinition">
+ <rule pattern="\b[nmuvxy]\b">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ <rule pattern="[^)]">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="^\)">
+ <token type="NameLabel"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[)]">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="\b_{1,2}\b">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="_?\d+(\.\d+)?(\s*[ejr]\s*)_?\d+(\.?=\d+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="_?\d+\.(?=\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="_?\d+x">
+ <token type="LiteralNumberIntegerLong"/>
+ </rule>
+ <rule pattern="_?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="nounDefinition">
+ <rule pattern="[^)]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="^\)">
+ <token type="NameLabel"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[)]">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="parentheses">
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="explicitDefinition"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,193 @@
+<lexer>
+ <config>
+ <name>Java</name>
+ <alias>java</alias>
+ <filename>*.java</filename>
+ <mime_type>text/x-java</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(^\s*)((?:(?:public|private|protected|static|strictfp)(?:\s+))*)(record)\b">
+ <bygroups>
+ <token type="TextWhitespace" />
+ <usingself state="root" />
+ <token type="KeywordDeclaration" />
+ </bygroups>
+ <push state="class" />
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="TextWhitespace" />
+ </rule>
+ <rule pattern="(//.*?)(\n)">
+ <bygroups>
+ <token type="CommentSingle" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline" />
+ </rule>
+ <rule
+ pattern="(assert|break|case|catch|continue|default|do|else|finally|for|if|goto|instanceof|new|return|switch|this|throw|try|while)\b">
+ <token type="Keyword" />
+ </rule>
+ <rule pattern="((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()">
+ <bygroups>
+ <usingself state="root" />
+ <token type="NameFunction" />
+ <token type="TextWhitespace" />
+ <token type="Punctuation" />
+ </bygroups>
+ </rule>
+ <rule pattern="@[^\W\d][\w.]*">
+ <token type="NameDecorator" />
+ </rule>
+ <rule
+ pattern="(abstract|const|enum|extends|final|implements|native|private|protected|public|sealed|static|strictfp|super|synchronized|throws|transient|volatile|yield)\b">
+ <token type="KeywordDeclaration" />
+ </rule>
+ <rule pattern="(boolean|byte|char|double|float|int|long|short|void)\b">
+ <token type="KeywordType" />
+ </rule>
+ <rule pattern="(package)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ <push state="import" />
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant" />
+ </rule>
+ <rule pattern="(class|interface)\b">
+ <token type="KeywordDeclaration" />
+ <push state="class" />
+ </rule>
+ <rule pattern="(var)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ <push state="var" />
+ </rule>
+ <rule pattern="(import(?:\s+static)?)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ <push state="import" />
+ </rule>
+ <rule pattern=""""\n">
+ <token type="LiteralString" />
+ <push state="multiline_string" />
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString" />
+ <push state="string" />
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'">
+ <token type="LiteralStringChar" />
+ </rule>
+ <rule pattern="(\.)((?:[^\W\d]|\$)[\w$]*)">
+ <bygroups>
+ <token type="Punctuation" />
+ <token type="NameAttribute" />
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)(default)(:)">
+ <bygroups>
+ <token type="TextWhitespace" />
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)((?:[^\W\d]|\$)[\w$]*)(:)">
+ <bygroups>
+ <token type="TextWhitespace" />
+ <token type="NameLabel" />
+ <token type="Punctuation" />
+ </bygroups>
+ </rule>
+ <rule pattern="([^\W\d]|\$)[\w$]*">
+ <token type="Name" />
+ </rule>
+ <rule
+ pattern="([0-9][0-9_]*\.([0-9][0-9_]*)?|\.[0-9][0-9_]*)([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)[pP][+\-]?[0-9][0-9_]*[fFdD]?">
+ <token type="LiteralNumberFloat" />
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?">
+ <token type="LiteralNumberHex" />
+ </rule>
+ <rule pattern="0[bB][01][01_]*[lL]?">
+ <token type="LiteralNumberBin" />
+ </rule>
+ <rule pattern="0[0-7_]+[lL]?">
+ <token type="LiteralNumberOct" />
+ </rule>
+ <rule pattern="0|[1-9][0-9_]*[lL]?">
+ <token type="LiteralNumberInteger" />
+ </rule>
+ <rule pattern="[~^*!%&\[\]<>|+=/?-]">
+ <token type="Operator" />
+ </rule>
+ <rule pattern="[{}();:.,]">
+ <token type="Punctuation" />
+ </rule>
+ <rule pattern="\n">
+ <token type="TextWhitespace" />
+ </rule>
+ </state>
+ <state name="class">
+ <rule pattern="\s+">
+ <token type="Text" />
+ </rule>
+ <rule pattern="([^\W\d]|\$)[\w$]*">
+ <token type="NameClass" />
+ <pop depth="1" />
+ </rule>
+ </state>
+ <state name="var">
+ <rule pattern="([^\W\d]|\$)[\w$]*">
+ <token type="Name" />
+ <pop depth="1" />
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="[\w.]+\*?">
+ <token type="NameNamespace" />
+ <pop depth="1" />
+ </rule>
+ </state>
+ <state name="multiline_string">
+ <rule pattern=""""">
+ <token type="LiteralString" />
+ <pop depth="1" />
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString" />
+ </rule>
+ <rule>
+ <include state="string" />
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="[^\\"]+">
+ <token type="LiteralString" />
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralString" />
+ </rule>
+ <rule pattern="\\"">
+ <token type="LiteralString" />
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString" />
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString" />
+ <pop depth="1" />
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,160 @@
+<lexer>
+ <config>
+ <name>JavaScript</name>
+ <alias>js</alias>
+ <alias>javascript</alias>
+ <filename>*.js</filename>
+ <filename>*.jsm</filename>
+ <filename>*.mjs</filename>
+ <filename>*.cjs</filename>
+ <mime_type>application/javascript</mime_type>
+ <mime_type>application/x-javascript</mime_type>
+ <mime_type>text/x-javascript</mime_type>
+ <mime_type>text/javascript</mime_type>
+ <dot_all>true</dot_all>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="interp">
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\\`">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\\[^`\\]">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interp-inside"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="[^`\\$]+">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ </state>
+ <state name="interp-inside">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="<!--">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="slashstartsregex">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gimuy]+\b|\B)">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=/)">
+ <token type="Text"/>
+ <push state="#pop" state="badregex"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="badregex">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\A#! ?/.*?\n">
+ <token type="CommentHashbang"/>
+ </rule>
+ <rule pattern="^(?=\s|/|<!--)">
+ <token type="Text"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="\d+(\.\d*|[eE][+\-]?\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[bB][01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[oO][0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9][0-9_]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\.\.\.|=>">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?">
+ <token type="Operator"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|this|of)\b">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(var|let|with|function)\b">
+ <token type="KeywordDeclaration"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(abstract|async|await|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false|null|NaN|Infinity|undefined)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|Promise|Proxy|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|document|this|window)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?:[$_\p{L}\p{N}]|\\u[a-fA-F0-9]{4})(?:(?:[$\p{L}\p{N}]|\\u[a-fA-F0-9]{4}))*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <push state="interp"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,112 @@
+<lexer>
+ <config>
+ <name>JSON</name>
+ <alias>json</alias>
+ <filename>*.json</filename>
+ <filename>*.jsonc</filename>
+ <filename>*.avsc</filename>
+ <mime_type>application/json</mime_type>
+ <dot_all>true</dot_all>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="root">
+ <rule>
+ <include state="value"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="simplevalue">
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="-?(0|[1-9]\d*)(\.\d+[eE](\+|-)?\d+|[eE](\+|-)?\d+|\.\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?(0|[1-9]\d*)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="objectattribute">
+ <rule>
+ <include state="value"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="2"/>
+ </rule>
+ </state>
+ <state name="objectvalue">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comment"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="NameTag"/>
+ <push state="objectattribute"/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="arrayvalue">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="value"/>
+ </rule>
+ <rule>
+ <include state="comment"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="value">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="simplevalue"/>
+ </rule>
+ <rule>
+ <include state="comment"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="objectvalue"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="Punctuation"/>
+ <push state="arrayvalue"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,83 @@
+<lexer>
+ <config>
+ <name>JSONata</name>
+ <alias>jsonata</alias>
+ <filename>*.jsonata</filename>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="[{}()\[\]:;,\.=]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\.\."> // Spread operator
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\^(?=\()"> // Sort operator
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\*\*|\*(?=\.)|\*"> // Descendant | Wildcard | Multiplication
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\/(?!\*)"> // Division
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[<>!]=?"> // Comparison operators
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="~>">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b(and|or|in)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[%@#&?]|\+(?!\d)|\-(?!\d)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\$[a-zA-Z0-9_]*(?![\w\(])">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\$\w*(?=\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="\b(function)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(\+|-)?(0|[1-9]\d*)(\.\d+[eE](\+|-)?\d+|[eE](\+|-)?\d+|\.\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\+|-)?(0|[1-9]\d*)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <!-- NOTE: This expression matches all object keys (NameTags), which are essentially strings with double quotes
+ that should only be captured on the left side of a colon (:) within a JSON-like object.
+ Therefore, this expression must preceed the one for all LiteralStringDouble -->
+ <rule pattern=""(\\.|[^\\"\r\n])*"(?=\s*:)">
+ <token type="NameTag"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="`.*`">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <!-- NOTE: This expression matches everything remaining, which should be only JSONata names.
+ Therefore, it has been left as last intentionally -->
+ <rule pattern="[a-zA-Z0-9_]*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,138 @@
+
+<lexer>
+ <config>
+ <name>Jsonnet</name>
+ <alias>jsonnet</alias>
+ <filename>*.jsonnet</filename>
+ <filename>*.libsonnet</filename>
+ </config>
+ <rules>
+ <state name="_comments">
+ <rule pattern="(//|#).*\n"><token type="CommentSingle"/></rule>
+ <rule pattern="/\*\*([^/]|/(?!\*))*\*/"><token type="LiteralStringDoc"/></rule>
+ <rule pattern="/\*([^/]|/(?!\*))*\*/"><token type="Comment"/></rule>
+ </state>
+ <state name="root">
+ <rule><include state="_comments"/></rule>
+ <rule pattern="@'.*'"><token type="LiteralString"/></rule>
+ <rule pattern="@".*""><token type="LiteralString"/></rule>
+ <rule pattern="'"><token type="LiteralString"/><push state="singlestring"/></rule>
+ <rule pattern="""><token type="LiteralString"/><push state="doublestring"/></rule>
+ <rule pattern="\|\|\|(.|\n)*\|\|\|"><token type="LiteralString"/></rule>
+ <rule pattern="[+-]?[0-9]+(.[0-9])?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="[!$~+\-&|^=<>*/%]"><token type="Operator"/></rule>
+ <rule pattern="\{"><token type="Punctuation"/><push state="object"/></rule>
+ <rule pattern="\["><token type="Punctuation"/><push state="array"/></rule>
+ <rule pattern="local\b"><token type="Keyword"/><push state="local_name"/></rule>
+ <rule pattern="assert\b"><token type="Keyword"/><push state="assert"/></rule>
+ <rule pattern="(assert|else|error|false|for|if|import|importstr|in|null|tailstrict|then|self|super|true)\b"><token type="Keyword"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="function(?=\()"><token type="Keyword"/><push state="function_params"/></rule>
+ <rule pattern="std\.[^\W\d]\w*(?=\()"><token type="NameBuiltin"/><push state="function_args"/></rule>
+ <rule pattern="[^\W\d]\w*(?=\()"><token type="NameFunction"/><push state="function_args"/></rule>
+ <rule pattern="[^\W\d]\w*"><token type="NameVariable"/></rule>
+ <rule pattern="[\.()]"><token type="Punctuation"/></rule>
+ </state>
+ <state name="singlestring">
+ <rule pattern="[^'\\]"><token type="LiteralString"/></rule>
+ <rule pattern="\\."><token type="LiteralStringEscape"/></rule>
+ <rule pattern="'"><token type="LiteralString"/><pop depth="1"/></rule>
+ </state>
+ <state name="doublestring">
+ <rule pattern="[^"\\]"><token type="LiteralString"/></rule>
+ <rule pattern="\\."><token type="LiteralStringEscape"/></rule>
+ <rule pattern="""><token type="LiteralString"/><pop depth="1"/></rule>
+ </state>
+ <state name="array">
+ <rule pattern=","><token type="Punctuation"/></rule>
+ <rule pattern="\]"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule><include state="root"/></rule>
+ </state>
+ <state name="local_name">
+ <rule pattern="[^\W\d]\w*(?=\()"><token type="NameFunction"/><push state="function_params"/></rule>
+ <rule pattern="[^\W\d]\w*"><token type="NameVariable"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="(?==)"><token type="TextWhitespace"/><push state="#pop" state="local_value"/></rule>
+ </state>
+ <state name="local_value">
+ <rule pattern="="><token type="Operator"/></rule>
+ <rule pattern=";"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule><include state="root"/></rule>
+ </state>
+ <state name="assert">
+ <rule pattern=":"><token type="Punctuation"/></rule>
+ <rule pattern=";"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule><include state="root"/></rule>
+ </state>
+ <state name="function_params">
+ <rule pattern="[^\W\d]\w*"><token type="NameVariable"/></rule>
+ <rule pattern="\("><token type="Punctuation"/></rule>
+ <rule pattern="\)"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern=","><token type="Punctuation"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="="><token type="Operator"/><push state="function_param_default"/></rule>
+ </state>
+ <state name="function_args">
+ <rule pattern="\("><token type="Punctuation"/></rule>
+ <rule pattern="\)"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern=","><token type="Punctuation"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule><include state="root"/></rule>
+ </state>
+ <state name="object">
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="local\b"><token type="Keyword"/><push state="object_local_name"/></rule>
+ <rule pattern="assert\b"><token type="Keyword"/><push state="object_assert"/></rule>
+ <rule pattern="\["><token type="Operator"/><push state="field_name_expr"/></rule>
+ <rule pattern="(?=[^\W\d]\w*)"><token type="Text"/><push state="field_name"/></rule>
+ <rule pattern="\}"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern="""><token type="NameVariable"/><push state="double_field_name"/></rule>
+ <rule pattern="'"><token type="NameVariable"/><push state="single_field_name"/></rule>
+ <rule><include state="_comments"/></rule>
+ </state>
+ <state name="field_name">
+ <rule pattern="[^\W\d]\w*(?=\()"><token type="NameFunction"/><push state="field_separator" state="function_params"/></rule>
+ <rule pattern="[^\W\d]\w*"><token type="NameVariable"/><push state="field_separator"/></rule>
+ </state>
+ <state name="double_field_name">
+ <rule pattern="([^"\\]|\\.)*""><token type="NameVariable"/><push state="field_separator"/></rule>
+ </state>
+ <state name="single_field_name">
+ <rule pattern="([^'\\]|\\.)*'"><token type="NameVariable"/><push state="field_separator"/></rule>
+ </state>
+ <state name="field_name_expr">
+ <rule pattern="\]"><token type="Operator"/><push state="field_separator"/></rule>
+ <rule><include state="root"/></rule>
+ </state>
+ <state name="function_param_default">
+ <rule pattern="(?=[,\)])"><token type="TextWhitespace"/><pop depth="1"/></rule>
+ <rule><include state="root"/></rule>
+ </state>
+ <state name="field_separator">
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="\+?::?:?"><token type="Punctuation"/><push state="#pop" state="#pop" state="field_value"/></rule>
+ <rule><include state="_comments"/></rule>
+ </state>
+ <state name="field_value">
+ <rule pattern=","><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern="\}"><token type="Punctuation"/><pop depth="2"/></rule>
+ <rule><include state="root"/></rule>
+ </state>
+ <state name="object_assert">
+ <rule pattern=":"><token type="Punctuation"/></rule>
+ <rule pattern=","><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule><include state="root"/></rule>
+ </state>
+ <state name="object_local_name">
+ <rule pattern="[^\W\d]\w*"><token type="NameVariable"/><push state="#pop" state="object_local_value"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ </state>
+ <state name="object_local_value">
+ <rule pattern="="><token type="Operator"/></rule>
+ <rule pattern=","><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern="\}"><token type="Punctuation"/><pop depth="2"/></rule>
+ <rule><include state="root"/></rule>
+ </state>
+ </rules>
+</lexer>
+
@@ -0,0 +1,400 @@
+<lexer>
+ <config>
+ <name>Julia</name>
+ <alias>julia</alias>
+ <alias>jl</alias>
+ <filename>*.jl</filename>
+ <mime_type>text/x-julia</mime_type>
+ <mime_type>application/x-julia</mime_type>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern="(")((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*)|\d+)?">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="LiteralStringAffix"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="interp"/>
+ </rule>
+ <rule pattern="%[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="[^"$%\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="curly">
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="rawstring">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\"">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="([^"\\]|\\[^"])+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="tqcommand">
+ <rule pattern="(```)((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*)|\d+)?">
+ <bygroups>
+ <token type="LiteralStringBacktick"/>
+ <token type="LiteralStringAffix"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\$">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="interp"/>
+ </rule>
+ <rule pattern="[^\\`$]+">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ </state>
+ <state name="in-intp">
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="tqstring">
+ <rule pattern="(""")((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*)|\d+)?">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="LiteralStringAffix"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\"\'$nrbtfav]|(x|u|U)[a-fA-F0-9]+|\d+)">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="interp"/>
+ </rule>
+ <rule pattern="[^"$%\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="interp">
+ <rule pattern="\$(?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*)">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="(\$)(\()">
+ <bygroups>
+ <token type="LiteralStringInterpol"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="in-intp"/>
+ </rule>
+ </state>
+ <state name="tqregex">
+ <rule pattern="(""")([imsxa]*)?">
+ <bygroups>
+ <token type="LiteralStringRegex"/>
+ <token type="LiteralStringAffix"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^"]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#=">
+ <token type="CommentMultiline"/>
+ <push state="blockcomment"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="[\[\](),;]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*))(\s*)(:)((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*))">
+ <bygroups>
+ <token type="Name"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Name"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?<![\]):<>\d.])(:(?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*))">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="(?<=::)(\s*)((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*))\b(?![(\[])">
+ <bygroups>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*))(\s*)([<>]:)(\s*)((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*))\b(?![(\[])">
+ <bygroups>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([<>]:)(\s*)((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*))\b(?![(\[])">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b((?:[a-zA-Z_¡-][a-zA-Z_0-9!¡-]*))(\s*)([<>]:)">
+ <bygroups>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
@@ -0,0 +1,98 @@
+<lexer>
+ <config>
+ <name>Jungle</name>
+ <alias>jungle</alias>
+ <filename>*.jungle</filename>
+ <mime_type>text/x-jungle</mime_type>
+ </config>
+ <rules>
+ <state name="var">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\b(((re)?source|barrel)Path|excludeAnnotations|annotations|lang)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="\bbase\b">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="\b(ind|zsm|hrv|ces|dan|dut|eng|fin|fre|deu|gre|hun|ita|nob|po[lr]|rus|sl[ov]|spa|swe|ara|heb|zh[st]|jpn|kor|tha|vie|bul|tur)">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="\b((semi)?round|rectangle)(-\d+x\d+)?\b">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="[\.;\[\]\(\$]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#(\n|[\w\W]*?[^#]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="^(?=\S)">
+ <token type="None"/>
+ <push state="instruction"/>
+ </rule>
+ <rule pattern="[\.;\[\]\(\)\$]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="instruction">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="=">
+ <token type="Operator"/>
+ <push state="value"/>
+ </rule>
+ <rule pattern="(?=\S)">
+ <token type="None"/>
+ <push state="var"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="value">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\$\(">
+ <token type="Punctuation"/>
+ <push state="var"/>
+ </rule>
+ <rule pattern="[;\[\]\(\)\$]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="#(\n|[\w\W]*?[^#]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="[\w_\-\.\/\\]+">
+ <token type="Text"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,223 @@
+<lexer>
+ <config>
+ <name>Kotlin</name>
+ <alias>kotlin</alias>
+ <filename>*.kt</filename>
+ <mime_type>text/x-kotlin</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern="\\[tbnr'"\\\$]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\u[0-9a-fA-F]{4}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-interpol"/>
+ </rule>
+ <rule pattern="[^\n\\"$]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="package">
+ <rule pattern="\S+">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="class">
+ <rule pattern="\x60[^\x60]+?\x60">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?:[_\p{L}][\p{L}\p{N}]*|`@?[_\p{L}][\p{L}\p{N}]+`)">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="property">
+ <rule pattern="\x60[^\x60]+?\x60">
+ <token type="NameProperty"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?:[_\p{L}][\p{L}\p{N}]*|`@?[_\p{L}][\p{L}\p{N}]+`)">
+ <token type="NameProperty"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string-interpol">
+ <rule pattern="\$(?:[_\p{L}][\p{L}\p{N}]*|`@?[_\p{L}][\p{L}\p{N}]+`)">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="\${[^}\n]*}">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ </state>
+ <state name="generics-specification">
+ <rule pattern="<">
+ <token type="Punctuation"/>
+ <push state="generics-specification"/>
+ </rule>
+ <rule pattern=">">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[,:*?]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(in|out|reified)">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\x60[^\x60]+?\x60">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(?:[_\p{L}][\p{L}\p{N}]*|`@?[_\p{L}][\p{L}\p{N}]+`)">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^\s*\[.*?\]">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//[^\n]*\n?">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/[*].*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="!==|!in|!is|===">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="%=|&&|\*=|\+\+|\+=|--|-=|->|\.\.|\/=|::|<=|==|>=|!!|!=|\|\||\?[:.]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[~!%^&*()+=|\[\]:;,.<>\/?-]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <push state="rawstring"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(')(\\u[0-9a-fA-F]{4})(')">
+ <bygroups>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringEscape"/>
+ <token type="LiteralStringChar"/>
+ </bygroups>
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+[Uu]?[Ll]?|[0-9]+(\.[0-9]*)?([eE][+-][0-9]+)?[fF]?[Uu]?[Ll]?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="(companion)(\s+)(object)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(class|interface|object)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="(package|import)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="package"/>
+ </rule>
+ <rule pattern="(val|var)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="property"/>
+ </rule>
+ <rule pattern="(fun)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="function"/>
+ </rule>
+ <rule pattern="(abstract|actual|annotation|as|as\?|break|by|catch|class|companion|const|constructor|continue|crossinline|data|delegate|do|dynamic|else|enum|expect|external|false|field|file|final|finally|for|fun|get|if|import|in|infix|init|inline|inner|interface|internal|is|it|lateinit|noinline|null|object|open|operator|out|override|package|param|private|property|protected|public|receiver|reified|return|sealed|set|setparam|super|suspend|tailrec|this|throw|true|try|typealias|typeof|val|value|var|vararg|when|where|while)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="@(?:[_\p{L}][\p{L}\p{N}]*|`@?[_\p{L}][\p{L}\p{N}]+`)">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="(?:\p{Lu}[_\p{L}]*)(?=\.)">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(?:[_\p{L}][\p{L}\p{N}]*|`@?[_\p{L}][\p{L}\p{N}]+`)">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="function">
+ <rule pattern="<">
+ <token type="Punctuation"/>
+ <push state="generics-specification"/>
+ </rule>
+ <rule pattern="\x60[^\x60]+?\x60">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?:[_\p{L}][\p{L}\p{N}]*|`@?[_\p{L}][\p{L}\p{N}]+`)">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="rawstring">
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?:[^$"]+|\"{1,2}[^"])+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="string-interpol"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,42 @@
+<lexer>
+ <config>
+ <name>Lighttpd configuration file</name>
+ <alias>lighty</alias>
+ <alias>lighttpd</alias>
+ <mime_type>text/x-lighttpd-conf</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="#.*\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\S*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[a-zA-Z._-]+">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\d+\.\d+\.\d+\.\d+(?:/\d+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="=>|=~|\+=|==|=|\+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\$[A-Z]+">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[(){}\[\],]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=""([^"\\]*(?:\\.[^"\\]*)*)"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,73 @@
+<lexer>
+ <config>
+ <name>LLVM</name>
+ <alias>llvm</alias>
+ <filename>*.ll</filename>
+ <mime_type>text/x-llvm</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="([-a-zA-Z$._][\w\-$.]*|"[^"]*?")\s*:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule>
+ <include state="keyword"/>
+ </rule>
+ <rule pattern="%([-a-zA-Z$._][\w\-$.]*|"[^"]*?")">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="@([-a-zA-Z$._][\w\-$.]*|"[^"]*?")">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="%\d+">
+ <token type="NameVariableAnonymous"/>
+ </rule>
+ <rule pattern="@\d+">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="#\d+">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="!([-a-zA-Z$._][\w\-$.]*|"[^"]*?")">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="!\d+">
+ <token type="NameVariableAnonymous"/>
+ </rule>
+ <rule pattern="c?"[^"]*?"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="0[xX][a-fA-F0-9]+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[=<>{}\[\]()*.,!]|x\b">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="(\n|\s)+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";.*?\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="keyword">
@@ -0,0 +1,158 @@
+<lexer>
+ <config>
+ <name>Lua</name>
+ <alias>lua</alias>
+ <filename>*.lua</filename>
+ <filename>*.wlua</filename>
+ <mime_type>text/x-lua</mime_type>
+ <mime_type>application/x-lua</mime_type>
+ </config>
+ <rules>
+ <state name="funcname">
+ <rule>
+ <include state="ws"/>
+ </rule>
+ <rule pattern="[.:]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(?:[^\W\d]\w*)(?=(?:(?:--\[(=*)\[[\w\W]*?\](\2)\])|(?:--.*$)|(?:\s+))*[.:])">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(?:[^\W\d]\w*)">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="label">
+ <rule>
+ <include state="ws"/>
+ </rule>
+ <rule pattern="::">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?:[^\W\d]\w*)">
+ <token type="NameLabel"/>
+ </rule>
+ </state>
+ <state name="dqs">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\\"]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="#!.*">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule>
+ <push state="base"/>
+ </rule>
+ </state>
+ <state name="ws">
+ <rule pattern="(?:--\[(=*)\[[\w\W]*?\](\1)\])">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="(?:--.*$)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(?:\s+)">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="goto">
+ <rule>
+ <include state="ws"/>
+ </rule>
+ <rule pattern="(?:[^\W\d]\w*)">
+ <token type="NameLabel"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="sqs">
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\\']+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="base">
+ <rule>
+ <include state="ws"/>
+ </rule>
+ <rule pattern="(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(?i)\d+e[+-]?\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(?s)\[(=*)\[.*?\]\1\]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="::">
+ <token type="Punctuation"/>
+ <push state="label"/>
+ </rule>
+ <rule pattern="\.{3}">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[=<>|~&+\-*/%#^]+|\.\.">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[\[\]{}().,:;]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(and|or|not)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="(break|do|else|elseif|end|for|if|in|repeat|return|then|until|while)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="goto\b">
+ <token type="KeywordReserved"/>
+ <push state="goto"/>
+ </rule>
+ <rule pattern="(local)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(true|false|nil)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(function)\b">
+ <token type="KeywordReserved"/>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="[A-Za-z_]\w*(\.[A-Za-z_]\w*)?">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <combined state="stringescape" state="sqs"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <combined state="stringescape" state="dqs"/>
+ </rule>
+ </state>
+ <state name="stringescape">
+ <rule pattern="\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|u\{[0-9a-fA-F]+\})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,131 @@
+<lexer>
+ <config>
+ <name>Makefile</name>
+ <alias>make</alias>
+ <alias>makefile</alias>
+ <alias>mf</alias>
+ <alias>bsdmake</alias>
+ <filename>*.mak</filename>
+ <filename>*.mk</filename>
+ <filename>Makefile</filename>
+ <filename>makefile</filename>
+ <filename>Makefile.*</filename>
+ <filename>GNUmakefile</filename>
+ <filename>BSDmakefile</filename>
+ <filename>Justfile</filename>
+ <filename>justfile</filename>
+ <filename>.justfile</filename>
+ <mime_type>text/x-makefile</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^(?:[\t ]+.*\n|\n)+">
+ <using lexer="Bash"/>
+ </rule>
+ <rule pattern="\$[<@$+%?|*]">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#.*?\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(export)(\s+)(?=[\w${}\t -]+\n)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="export"/>
+ </rule>
+ <rule pattern="export\s+">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="([\w${}().-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <using lexer="Bash"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?s)"(\\\\|\\.|[^"\\])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="(?s)'(\\\\|\\.|[^'\\])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="([^\n:]+)(:+)([ \t]*)">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="block-header"/>
+ </rule>
+ <rule pattern="\$\(">
+ <token type="Keyword"/>
+ <push state="expansion"/>
+ </rule>
+ </state>
+ <state name="expansion">
+ <rule pattern="[^$a-zA-Z_()]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Keyword"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="export">
+ <rule pattern="[\w${}-]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="block-header">
+ <rule pattern="[,|]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="#.*?\n">
+ <token type="Comment"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\$\(">
+ <token type="Keyword"/>
+ <push state="expansion"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]+">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,120 @@
+<lexer>
+ <config>
+ <name>Mako</name>
+ <alias>mako</alias>
+ <filename>*.mao</filename>
+ <mime_type>application/x-mako</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(\s*)(%)(\s*end(?:\w+))(\n|\Z)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ <token type="Keyword"/>
+ <token type="Other"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(%)([^\n]*)(\n|\Z)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ <using lexer="Python"/>
+ <token type="Other"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(##[^\n]*)(\n|\Z)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ <token type="Other"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?s)<%doc>.*?</%doc>">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(<%)([\w.:]+)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="NameBuiltin"/>
+ </bygroups>
+ <push state="tag"/>
+ </rule>
+ <rule pattern="(</%)([\w.:]+)(>)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="NameBuiltin"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="<%(?=([\w.:]+))">
+ <token type="CommentPreproc"/>
+ <push state="ondeftags"/>
+ </rule>
+ <rule pattern="(<%(?:!?))(.*?)(%>)(?s)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <using lexer="Python"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\$\{)(.*?)(\})">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <using lexer="Python"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?sx)
 (.+?) # anything, followed by:
 (?:
 (?<=\n)(?=%|\#\#) | # an eval or comment line
 (?=\#\*) | # multiline comment
 (?=</?%) | # a python block
 # call start or end
 (?=\$\{) | # a substitution
 (?<=\n)(?=\s*%) |
 # - don't consume
 (\\\n) | # an escaped newline
 \Z # end of string
 )
 ">
+ <bygroups>
+ <token type="Other"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="ondeftags">
+ <rule pattern="<%">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(?<=<%)(include|inherit|namespace|page)">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule>
+ <include state="tag"/>
+ </rule>
+ </state>
+ <state name="tag">
+ <rule pattern="((?:\w+)\s*=)(\s*)(".*?")">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="/?\s*>">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="attr">
+ <rule pattern="".*?"">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="'.*?'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\s>]+">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,89 @@
+<lexer>
+ <config>
+ <name>Mason</name>
+ <alias>mason</alias>
+ <filename>*.m</filename>
+ <filename>*.mhtml</filename>
+ <filename>*.mc</filename>
+ <filename>*.mi</filename>
+ <filename>autohandler</filename>
+ <filename>dhandler</filename>
+ <mime_type>application/x-mason</mime_type>
+ <priority>0.1</priority>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(<%doc>)(.*?)(</%doc>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="CommentMultiline"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="NameTag"/>
+ <usingself state="root"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="NameFunction"/>
+ <token type="NameTag"/>
+ <using lexer="Perl"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<&[^|])(.*?)(,.*?)?(&>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="NameFunction"/>
+ <using lexer="Perl"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<&\|)(.*?)(,.*?)?(&>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="NameFunction"/>
+ <using lexer="Perl"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="</&>">
+ <token type="NameTag"/>
+ </rule>
+ <rule pattern="(<%!?)(.*?)(%>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <using lexer="Perl"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?<=^)#[^\n]*(\n|\Z)">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(?<=^)(%)([^\n]*)(\n|\Z)">
+ <bygroups>
+ <token type="NameTag"/>
+ <using lexer="Perl"/>
+ <token type="Other"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?sx)
 (.+?) # anything, followed by:
 (?:
 (?<=\n)(?=[%#]) | # an eval or comment line
 (?=</?[%&]) | # a substitution or block or
 # call start or end
 # - don't consume
 (\\\n) | # an escaped newline
 \Z # end of string
 )">
+ <bygroups>
+ <using lexer="HTML"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,155 @@
+<lexer>
+ <config>
+ <name>Materialize SQL dialect</name>
+ <mime_type>text/x-materializesql</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ <alias>materialize</alias>
+ <alias>mzsql</alias>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text" />
+ </rule>
+ <rule pattern="--.*\n?">
+ <token type="CommentSingle" />
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline" />
+ <push state="multiline-comments" />
+ </rule>
+ <rule pattern="(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b">
+ <token type="NameBuiltin" />
+ </rule>
+ <rule pattern="(?s)(DO)(\s+)(?:(LANGUAGE)?(\s+)('?)(\w+)?('?)(\s+))?(\$)([^$]*)(\$)(.*?)(\$)(\10)(\$)">
+ <usingbygroup>
+ <sublexer_name_group>6</sublexer_name_group>
+ <code_group>12</code_group>
+ <emitters>
+ <token type="Keyword" />
+ <token type="Text" />
+ <token type="Keyword" />
+ <token type="Text" />
+ <token type="LiteralStringSingle" />
+ <token type="LiteralStringSingle" />
+ <token type="LiteralStringSingle" />
+ <token type="Text" />
+ <token type="LiteralStringHeredoc" />
+ <token type="LiteralStringHeredoc" />
+ <token type="LiteralStringHeredoc" />
+ <token type="LiteralStringHeredoc" />
+ <token type="LiteralStringHeredoc" />
+ <token type="LiteralStringHeredoc" />
+ <token type="LiteralStringHeredoc" />
+ </emitters>
+ </usingbygroup>
+ </rule>
@@ -0,0 +1,60 @@
+<lexer>
+ <config>
+ <name>Mathematica</name>
+ <alias>mathematica</alias>
+ <alias>mma</alias>
+ <alias>nb</alias>
+ <filename>*.cdf</filename>
+ <filename>*.m</filename>
+ <filename>*.ma</filename>
+ <filename>*.mt</filename>
+ <filename>*.mx</filename>
+ <filename>*.nb</filename>
+ <filename>*.nbp</filename>
+ <filename>*.wl</filename>
+ <mime_type>application/mathematica</mime_type>
+ <mime_type>application/vnd.wolfram.mathematica</mime_type>
+ <mime_type>application/vnd.wolfram.mathematica.package</mime_type>
+ <mime_type>application/vnd.wolfram.cdf</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(?s)\(\*.*?\*\)">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="([a-zA-Z]+[A-Za-z0-9]*`)">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="([A-Za-z0-9]*_+[A-Za-z0-9]*)">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="#\d*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="([a-zA-Z]+[a-zA-Z0-9]*)">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="-?\d+\.\d*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?\d*\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(!===|@@@|===|/;|:=|->|:>|/\.|=\.|~~|<=|@@|/@|&&|\|\||//|<>|;;|>=|-|@|!|\^|/|\*|\?|\+|&|<|>|=|\|)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(,|;|\(|\)|\[|\]|\{|\})">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="".*?"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,114 @@
+<lexer>
+ <config>
+ <name>Matlab</name>
+ <alias>matlab</alias>
+ <filename>*.m</filename>
+ <mime_type>text/matlab</mime_type>
+ </config>
+ <rules>
+ <state name="blockcomment">
+ <rule pattern="^\s*%\}">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="^.*\n">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern=".">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="deffunc">
+ <rule pattern="(\s*)(?:(.+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)">
+ <bygroups>
+ <token type="TextWhitespace"/>
+ <token type="Text"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ <token type="NameFunction"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(\s*)([a-zA-Z_]\w*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="^!.*">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="%\{\s*\n">
+ <token type="CommentMultiline"/>
+ <push state="blockcomment"/>
+ </rule>
+ <rule pattern="%.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="^\s*function">
+ <token type="Keyword"/>
+ <push state="deffunc"/>
+ </rule>
+ <rule pattern="(properties|persistent|enumerated|otherwise|continue|function|classdef|methods|elseif|events|switch|return|global|parfor|catch|break|while|else|spmd|case|try|end|for|if)\b">
+ <token type="Keyword"/>
+ </rule>
@@ -0,0 +1,138 @@
+
+<lexer>
+ <config>
+ <name>MCFunction</name>
+ <alias>mcfunction</alias>
+ <alias>mcf</alias>
+ <filename>*.mcfunction</filename>
+ <mime_type>text/mcfunction</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule><include state="names"/></rule>
+ <rule><include state="comments"/></rule>
+ <rule><include state="literals"/></rule>
+ <rule><include state="whitespace"/></rule>
+ <rule><include state="property"/></rule>
+ <rule><include state="operators"/></rule>
+ <rule><include state="selectors"/></rule>
+ </state>
+ <state name="names">
+ <rule pattern="^(\s*)([a-z_]+)"><bygroups><token type="TextWhitespace"/><token type="NameBuiltin"/></bygroups></rule>
+ <rule pattern="(?<=run)\s+[a-z_]+"><token type="NameBuiltin"/></rule>
+ <rule pattern="\b[0-9a-fA-F]+(?:-[0-9a-fA-F]+){4}\b"><token type="NameVariable"/></rule>
+ <rule><include state="resource-name"/></rule>
+ <rule pattern="[A-Za-z_][\w.#%$]+"><token type="KeywordConstant"/></rule>
+ <rule pattern="[#%$][\w.#%$]+"><token type="NameVariableMagic"/></rule>
+ </state>
+ <state name="resource-name">
+ <rule pattern="#?[a-z_][a-z_.-]*:[a-z0-9_./-]+"><token type="NameFunction"/></rule>
+ <rule pattern="#?[a-z0-9_\.\-]+\/[a-z0-9_\.\-\/]+"><token type="NameFunction"/></rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ </state>
+ <state name="comments">
+ <rule pattern="^\s*(#[>!])"><token type="CommentMultiline"/><push state="comments.block" state="comments.block.emphasized"/></rule>
+ <rule pattern="#.*$"><token type="CommentSingle"/></rule>
+ </state>
+ <state name="comments.block">
+ <rule pattern="^\s*#[>!]"><token type="CommentMultiline"/><push state="comments.block.emphasized"/></rule>
+ <rule pattern="^\s*#"><token type="CommentMultiline"/><push state="comments.block.normal"/></rule>
+ <rule><pop depth="1"/></rule>
+ </state>
+ <state name="comments.block.normal">
+ <rule><include state="comments.block.special"/></rule>
+ <rule pattern="\S+"><token type="CommentMultiline"/></rule>
+ <rule pattern="\n"><token type="Text"/><pop depth="1"/></rule>
+ <rule><include state="whitespace"/></rule>
+ </state>
+ <state name="comments.block.emphasized">
+ <rule><include state="comments.block.special"/></rule>
+ <rule pattern="\S+"><token type="LiteralStringDoc"/></rule>
+ <rule pattern="\n"><token type="Text"/><pop depth="1"/></rule>
+ <rule><include state="whitespace"/></rule>
+ </state>
+ <state name="comments.block.special">
+ <rule pattern="@\S+"><token type="NameDecorator"/></rule>
+ <rule><include state="resource-name"/></rule>
+ <rule pattern="[#%$][\w.#%$]+"><token type="NameVariableMagic"/></rule>
+ </state>
+ <state name="operators">
+ <rule pattern="[\-~%^?!+*<>\\/|&=.]"><token type="Operator"/></rule>
+ </state>
+ <state name="literals">
+ <rule pattern="\.\."><token type="Literal"/></rule>
+ <rule pattern="(true|false)"><token type="KeywordPseudo"/></rule>
+ <rule pattern="[A-Za-z_]+"><token type="NameVariableClass"/></rule>
+ <rule pattern="[0-7]b"><token type="LiteralNumberByte"/></rule>
+ <rule pattern="[+-]?\d*\.?\d+([eE]?[+-]?\d+)?[df]?\b"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="[+-]?\d+\b"><token type="LiteralNumberInteger"/></rule>
+ <rule pattern="""><token type="LiteralStringDouble"/><push state="literals.string-double"/></rule>
+ <rule pattern="'"><token type="LiteralStringSingle"/><push state="literals.string-single"/></rule>
+ </state>
+ <state name="literals.string-double">
+ <rule pattern="\\."><token type="LiteralStringEscape"/></rule>
+ <rule pattern="[^\\"\n]+"><token type="LiteralStringDouble"/></rule>
+ <rule pattern="""><token type="LiteralStringDouble"/><pop depth="1"/></rule>
+ </state>
+ <state name="literals.string-single">
+ <rule pattern="\\."><token type="LiteralStringEscape"/></rule>
+ <rule pattern="[^\\'\n]+"><token type="LiteralStringSingle"/></rule>
+ <rule pattern="'"><token type="LiteralStringSingle"/><pop depth="1"/></rule>
+ </state>
+ <state name="selectors">
+ <rule pattern="@[a-z]"><token type="NameVariable"/></rule>
+ </state>
+ <state name="property">
+ <rule pattern="\{"><token type="Punctuation"/><push state="property.curly" state="property.key"/></rule>
+ <rule pattern="\["><token type="Punctuation"/><push state="property.square" state="property.key"/></rule>
+ </state>
+ <state name="property.curly">
+ <rule><include state="whitespace"/></rule>
+ <rule><include state="property"/></rule>
+ <rule pattern="\}"><token type="Punctuation"/><pop depth="1"/></rule>
+ </state>
+ <state name="property.square">
+ <rule><include state="whitespace"/></rule>
+ <rule><include state="property"/></rule>
+ <rule pattern="\]"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern=","><token type="Punctuation"/></rule>
+ </state>
+ <state name="property.key">
+ <rule><include state="whitespace"/></rule>
+ <rule pattern="#?[a-z_][a-z_\.\-]*\:[a-z0-9_\.\-/]+(?=\s*\=)"><token type="NameAttribute"/><push state="property.delimiter"/></rule>
+ <rule pattern="#?[a-z_][a-z0-9_\.\-/]+"><token type="NameAttribute"/><push state="property.delimiter"/></rule>
+ <rule pattern="[A-Za-z_\-\+]+"><token type="NameAttribute"/><push state="property.delimiter"/></rule>
+ <rule pattern="""><token type="NameAttribute"/><push state="property.delimiter"/></rule>
+ <rule pattern="'"><token type="NameAttribute"/><push state="property.delimiter"/></rule>
+ <rule pattern="-?\d+"><token type="LiteralNumberInteger"/><push state="property.delimiter"/></rule>
+ <rule><pop depth="1"/></rule>
+ </state>
+ <state name="property.key.string-double">
+ <rule pattern="\\."><token type="LiteralStringEscape"/></rule>
+ <rule pattern="[^\\"\n]+"><token type="NameAttribute"/></rule>
+ <rule pattern="""><token type="NameAttribute"/><pop depth="1"/></rule>
+ </state>
+ <state name="property.key.string-single">
+ <rule pattern="\\."><token type="LiteralStringEscape"/></rule>
+ <rule pattern="[^\\'\n]+"><token type="NameAttribute"/></rule>
+ <rule pattern="'"><token type="NameAttribute"/><pop depth="1"/></rule>
+ </state>
+ <state name="property.delimiter">
+ <rule><include state="whitespace"/></rule>
+ <rule pattern="[:=]!?"><token type="Punctuation"/><push state="property.value"/></rule>
+ <rule pattern=","><token type="Punctuation"/></rule>
+ <rule><pop depth="1"/></rule>
+ </state>
+ <state name="property.value">
+ <rule><include state="whitespace"/></rule>
+ <rule pattern="#?[a-z_][a-z_\.\-]*\:[a-z0-9_\.\-/]+"><token type="NameTag"/></rule>
+ <rule pattern="#?[a-z_][a-z0-9_\.\-/]+"><token type="NameTag"/></rule>
+ <rule><include state="literals"/></rule>
+ <rule><include state="property"/></rule>
+ <rule><pop depth="1"/></rule>
+ </state>
+ </rules>
+</lexer>
+
@@ -0,0 +1,85 @@
+<lexer>
+ <config>
+ <name>Meson</name>
+ <alias>meson</alias>
+ <alias>meson.build</alias>
+ <filename>meson.build</filename>
+ <filename>meson_options.txt</filename>
+ <mime_type>text/x-meson</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="#.*?$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="'''.*'''">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="[1-9][0-9]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="0o[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0x[a-fA-F0-9]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule>
+ <include state="string"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="expr"/>
+ </rule>
+ <rule pattern="[a-zA-Z_][a-zA-Z_0-9]*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'.*?(?<!\\)(\\\\)*?'">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(endforeach|continue|foreach|break|endif|else|elif|if)\b">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="expr">
+ <rule pattern="(in|and|or|not)\b">
+ <token type="OperatorWord"/>
+ </rule>
+      <rule pattern="(\*=|/=|%=|\+=|-=|==|!=|\+|-|=)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[\[\]{}:().,?]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(false|true)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule>
+ <include state="builtins"/>
+ </rule>
+ <rule pattern="(target_machine|build_machine|host_machine|meson)\b">
+ <token type="NameVariableMagic"/>
+ </rule>
+ </state>
+ <state name="builtins">
+ <rule pattern="(?<!\.)(add_project_link_arguments|add_global_link_arguments|add_project_arguments|add_global_arguments|include_directories|configuration_data|declare_dependency|install_headers|both_libraries|install_subdir|add_test_setup|configure_file|static_library|shared_library|custom_target|add_languages|shared_module|set_variable|get_variable|find_library|find_program|build_target|install_data|environment|is_disabler|run_command|subdir_done|install_man|is_variable|subproject|dependency|join_paths|get_option|executable|generator|benchmark|disabler|project|message|library|summary|vcs_tag|warning|assert|subdir|range|files|error|test|jar)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<!\.)import\b">
+ <token type="NameNamespace"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,270 @@
+<lexer>
+ <config>
+ <name>Metal</name>
+ <alias>metal</alias>
+ <filename>*.metal</filename>
+ <mime_type>text/x-metal</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="function">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="macro">
+ <rule pattern="(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="CommentPreprocFile"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^/\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="/[*](.|\n)*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(?<=\\)\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="if0">
+ <rule pattern="^\s*#if.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <push/>
+ </rule>
+ <rule pattern="^\s*#el(?:se|if).*\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="^\s*#endif.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".*?\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="statements">
+ <rule pattern="(namespace|constexpr|operator|template|using|this)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(enum)\b(\s+)(class)\b(\s*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="(class|struct|enum|union)\b(\s*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="\[\[.+\]\]">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[xX]([0-9A-Fa-f]('?[0-9A-Fa-f]+)*)[LlUu]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0('?[0-7]+)+[LlUu]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[Bb][01]('?[01]+)*[LlUu]*">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="[0-9]('?[0-9]+)*[LlUu]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[()\[\],.]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(continue|typedef|sizeof|extern|static|switch|struct|return|union|const|break|while|enum|else|case|for|do|if)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(bool|float|half|long|ptrdiff_t|size_t|unsigned|u?char|u?int((8|16|32|64)_t)?|u?short)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(bool|float|half|u?(char|int|long|short))(2|3|4)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="packed_(float|half|long|u?(char|int|short))(2|3|4)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(float|half)(2|3|4)x(2|3|4)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="atomic_u?int\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(rg?(8|16)(u|s)norm|rgba(8|16)(u|s)norm|srgba8unorm|rgb10a2|rg11b10f|rgb9e5)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(array|depth(2d|cube)(_array)?|depth2d_ms(_array)?|sampler|texture_buffer|texture(1|2)d(_array)?|texture2d_ms(_array)?|texture3d|texturecube(_array)?|uniform|visible_function_table)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(true|false|NULL)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(threadgroup_imageblock|threadgroup|constant|ray_data|device|thread)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(:)(?!:)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="(fragment|kernel|vertex)?((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="function"/>
+ </rule>
+ <rule pattern="(fragment|kernel|vertex)?((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)">
+ <bygroups>
+ <token type="Keyword"/>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <push state="statement"/>
+ </rule>
+ </state>
+ <state name="classname">
+ <rule pattern="(\[\[.+\]\])(\s*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s*(?=[>{])">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="^#if\s+0">
+ <token type="CommentPreproc"/>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^#">
+ <token type="CommentPreproc"/>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(\n|[\w\W]*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="statement">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern="[{]">
+ <token type="Punctuation"/>
+ <push state="root"/>
+ </rule>
+ <rule pattern="[;}]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,82 @@
+<lexer>
+ <config>
+ <name>MiniZinc</name>
+ <alias>minizinc</alias>
+ <alias>MZN</alias>
+ <alias>mzn</alias>
+ <filename>*.mzn</filename>
+ <filename>*.dzn</filename>
+ <filename>*.fzn</filename>
+ <mime_type>text/minizinc</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\%(.*?)\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\b(annotation|constraint|predicate|minimize|function|maximize|satisfy|include|record|output|solve|test|list|type|ann|par|any|var|op|of)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(string|tuple|float|array|bool|enum|int|set)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\b(forall|where|endif|then|else|for|if)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(array_intersect|index_set_2of3|index_set_1of3|index_set_3of3|index_set_1of2|index_set_2of2|array_union|show_float|dom_array|int2float|set2array|index_set|dom_size|lb_array|is_fixed|ub_array|bool2int|show_int|array4d|array2d|array1d|array5d|array6d|array3d|product|length|assert|concat|trace|acosh|round|abort|log10|floor|sinh|tanh|atan|sqrt|asin|show|log2|card|ceil|cosh|join|pow|cos|max|log|exp|dom|sin|abs|fix|sum|tan|min|lb|ln|ub)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(not|<->|->|<-|\\/|xor|/\\)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(<|>|<=|>=|==|=|!=)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(\+|-|\*|/|div|mod)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b(intersect|superset|symdiff|subset|union|diff|in)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(\\|\.\.|\+\+)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[|()\[\]{},:;]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="([+-]?)\d+(\.(?!\.)\d*)?([eE][-+]?\d+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="::\s*([^\W\d]\w*)(\s*\([^\)]*\))?">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="\b([^\W\d]\w*)\b(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^\W\d]\w*">
+ <token type="NameOther"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,73 @@
+<lexer>
+ <config>
+ <name>MLIR</name>
+ <alias>mlir</alias>
+ <filename>*.mlir</filename>
+ <mime_type>text/x-mlir</mime_type>
+ </config>
+ <rules>
+ <state name="whitespace">
+ <rule pattern="(\n|\s)+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="keyword">
+ <rule pattern="(constant|return)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(memref|tensor|vector|func|loc)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="bf16|f16|f32|f64|index">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="i[1-9]\d*">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="c?"[^"]*?"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\^([-a-zA-Z$._][\w\-$.0-9]*)\s*">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="([\w\d_$.]+)\s*=">
+ <token type="NameLabel"/>
+ </rule>
+ <rule>
+ <include state="keyword"/>
+ </rule>
+ <rule pattern="->">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="@([\w_][\w\d_$.]*)">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="[%#][\w\d_$.]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="([1-9?][\d?]*\s*x)+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="0[xX][a-fA-F0-9]+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[=<>{}\[\]()*.,!:]|x\b">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[\w\d]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,245 @@
+<lexer>
+ <config>
+ <name>Modula-2</name>
+ <alias>modula2</alias>
+ <alias>m2</alias>
+ <filename>*.def</filename>
+ <filename>*.mod</filename>
+ <mime_type>text/x-modula2</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="dialecttags">
+ <rule pattern="\(\*!m2pim\*\)">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="\(\*!m2iso\*\)">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="\(\*!m2r10\*\)">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="\(\*!objm2\*\)">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="\(\*!m2iso\+aglet\*\)">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="\(\*!m2pim\+gm2\*\)">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="\(\*!m2iso\+p1\*\)">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="\(\*!m2iso\+xds\*\)">
+ <token type="CommentSpecial"/>
+ </rule>
+ </state>
+ <state name="unigraph_operators">
+ <rule pattern="[+-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[*/]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[=#<>]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\^">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="@">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="&">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="~">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="`">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ <state name="string_literals">
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="identifiers">
+ <rule pattern="([a-zA-Z_$][\w$]*)">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="pragmas">
+ <rule pattern="<\*.*?\*>">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\(\*\$.*?\*\)">
+ <token type="CommentPreproc"/>
+ </rule>
+ </state>
+ <state name="comments">
+ <rule pattern="^//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\(\*([^$].*?)\*\)">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\*(.*?)\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\n+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="suffixed_number_literals">
+ <rule pattern="[0-7]+B">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="[0-7]+C">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="[0-9A-F]+H">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ </state>
+ <state name="plain_number_literals">
+ <rule pattern="[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*[eE][+-]?[0-9]+(\'[0-9]+)*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9]+(\'[0-9]+)*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="digraph_punctuation">
+ <rule pattern="\.\.">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="<<">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=">>">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="->">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\|#">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="##">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\|\*">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="unigraph_punctuation">
+ <rule pattern="[()\[\]{},.:;|]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="!">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\?">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="dialecttags"/>
+ </rule>
+ <rule>
+ <include state="pragmas"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule>
+ <include state="identifiers"/>
+ </rule>
+ <rule>
+ <include state="suffixed_number_literals"/>
+ </rule>
+ <rule>
+ <include state="prefixed_number_literals"/>
+ </rule>
+ <rule>
+ <include state="plain_number_literals"/>
+ </rule>
+ <rule>
+ <include state="string_literals"/>
+ </rule>
+ <rule>
+ <include state="digraph_punctuation"/>
+ </rule>
+ <rule>
+ <include state="digraph_operators"/>
+ </rule>
+ <rule>
+ <include state="unigraph_punctuation"/>
+ </rule>
+ <rule>
+ <include state="unigraph_operators"/>
+ </rule>
+ </state>
+ <state name="prefixed_number_literals">
+ <rule pattern="0b[01]+(\'[01]+)*">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[ux][0-9A-F]+(\'[0-9A-F]+)*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ </state>
+ <state name="digraph_operators">
+ <rule pattern="\*\.">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\+>">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="<>">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="<=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=">=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="==">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="::">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=":=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\+\+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="--">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,153 @@
+<lexer>
+ <config>
+ <name>MonkeyC</name>
+ <alias>monkeyc</alias>
+ <filename>*.mc</filename>
+ <mime_type>text/x-monkeyc</mime_type>
+ </config>
+ <rules>
+ <state name="class">
+ <rule pattern="([a-zA-Z_][\w_\.]*)(?:(\s+)(extends)(\s+)([a-zA-Z_][\w_\.]*))?">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="function">
+ <rule pattern="initialize">
+ <token type="NameFunctionMagic"/>
+ </rule>
+ <rule pattern="[a-zA-Z_][\w_\.]*">
+ <token type="NameFunction"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="module">
+ <rule pattern="[a-zA-Z_][\w_\.]*">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(\n|[\w\W]*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern=":[a-zA-Z_][\w_\.]*">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="[{}\[\]\(\),;:\.]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[&~\|\^!+\-*\/%=?]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="=>|[+-]=|&&|\|\||>>|<<|[<>]=?|[!=]=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b(and|or|instanceof|has|extends|new)">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="(false|null|true|NaN)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(using)((?:\s|\\\\s)+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern="(class)((?:\s|\\\\s)+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="(function)((?:\s|\\\\s)+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="function"/>
+ </rule>
+ <rule pattern="(module)((?:\s|\\\\s)+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="module"/>
+ </rule>
+ <rule pattern="\b(if|else|for|switch|case|while|break|continue|default|do|try|catch|finally|return|throw|extends|function)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(const|enum|hidden|public|protected|private|static)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\bvar\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="\b(Activity(Monitor|Recording)?|Ant(Plus)?|Application|Attention|Background|Communications|Cryptography|FitContributor|Graphics|Gregorian|Lang|Math|Media|Persisted(Content|Locations)|Position|Properties|Sensor(History|Logging)?|Storage|StringUtil|System|Test|Time(r)?|Toybox|UserProfile|WatchUi|Rez|Drawables|Strings|Fonts|method)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="\b(me|self|\$)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^''])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="-?(0x[0-9a-fA-F]+l?)">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="-?([0-9]+(\.[0-9]+[df]?|[df]))\b">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?([0-9]+l?)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="([a-zA-Z_][\w_\.]*)(?:(\s+)(as)(\s+)([a-zA-Z_][\w_]*))?">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="Text"/>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,90 @@
+<lexer>
+ <config>
+ <name>MorrowindScript</name>
+ <alias>morrowind</alias>
+ <alias>mwscript</alias>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(["'])(?:(?=(\\?))\2.)*?\1">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[0-9]+\.[0-9]*(?!\.)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="types"/>
+ </rule>
+ <rule>
+ <include state="builtins"/>
+ </rule>
+ <rule>
+ <include state="punct"/>
+ </rule>
+ <rule>
+ <include state="operators"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\S+\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[a-zA-Z0-9_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(?i)(begin|if|else|elseif|endif|while|endwhile|return|to)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(?i)(end)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(?i)(end)\w+.*$">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[\w+]->[\w+]">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ <state name="builtins">
@@ -0,0 +1,77 @@
+<lexer>
+ <config>
+ <name>Myghty</name>
+ <alias>myghty</alias>
+ <filename>*.myt</filename>
+ <filename>autodelegate</filename>
+ <mime_type>application/x-myghty</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(<%(?:def|method))(\s*)(.*?)(>)(.*?)(</%\2\s*>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ <token type="NameTag"/>
+ <usingself state="root"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<%\w+)(.*?)(>)(.*?)(</%\2\s*>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="NameFunction"/>
+ <token type="NameTag"/>
+ <using lexer="Python2"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<&[^|])(.*?)(,.*?)?(&>)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="NameFunction"/>
+ <using lexer="Python2"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<&\|)(.*?)(,.*?)?(&>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="NameFunction"/>
+ <using lexer="Python2"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="</&>">
+ <token type="NameTag"/>
+ </rule>
+ <rule pattern="(<%!?)(.*?)(%>)(?s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <using lexer="Python2"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?<=^)#[^\n]*(\n|\Z)">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(?<=^)(%)([^\n]*)(\n|\Z)">
+ <bygroups>
+ <token type="NameTag"/>
+ <using lexer="Python2"/>
+ <token type="Other"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?sx)
 (.+?) # anything, followed by:
 (?:
 (?<=\n)(?=[%#]) | # an eval or comment line
 (?=</?[%&]) | # a substitution or block or
 # call start or end
 # - don't consume
 (\\\n) | # an escaped newline
 \Z # end of string
 )">
+ <bygroups>
+ <token type="Other"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,121 @@
+<lexer>
+ <config>
+ <name>MySQL</name>
+ <alias>mysql</alias>
+ <alias>mariadb</alias>
+ <filename>*.sql</filename>
+ <mime_type>text/x-mysql</mime_type>
+ <mime_type>text/x-mariadb</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern="[^']+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="''">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="double-string">
+ <rule pattern="[^"]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="""">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="(#|--\s+).*\n?">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comments"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[0-9]*\.[0-9]+(e[+-][0-9]+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="((?:_[a-z0-9]+)?)(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <push state="string"/>
+ </rule>
+ <rule pattern="((?:_[a-z0-9]+)?)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <push state="double-string"/>
+ </rule>
+ <rule pattern="[+*/<>=~!@#%^&|`?-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b(tinyint|smallint|mediumint|int|integer|bigint|date|datetime|time|bit|bool|tinytext|mediumtext|longtext|text|tinyblob|mediumblob|longblob|blob|float|double|double\s+precision|real|numeric|dec|decimal|timestamp|year|char|varchar|varbinary|varcharacter|enum|set)(\b\s*)(\()?">
+ <bygroups>
+ <token type="KeywordType"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
@@ -0,0 +1,126 @@
+<lexer>
+ <config>
+ <name>NASM</name>
+ <alias>nasm</alias>
+ <filename>*.asm</filename>
+ <filename>*.ASM</filename>
+ <filename>*.nasm</filename>
+ <mime_type>text/x-nasm</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <priority>1.0</priority> <!-- TASM uses the same file endings, but TASM is not as common as NASM, so we prioritize NASM higher by default. -->
+ </config>
+ <rules>
+ <state name="punctuation">
+ <rule pattern="[,():\[\]]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[&|^<>+*/%~-]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[$]+">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="seg|wrt|strict">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="byte|[dq]?word">
+ <token type="KeywordType"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^\s*%">
+ <token type="CommentPreproc"/>
+ <push state="preproc"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="[a-z$._?][\w$.?#@~]*:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="([a-z$._?][\w$.?#@~]*)(\s+)(equ)">
+ <bygroups>
+ <token type="NameConstant"/>
+ <token type="KeywordDeclaration"/>
+ <token type="KeywordDeclaration"/>
+ </bygroups>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|EXPORT|LIBRARY|MODULE">
+ <token type="Keyword"/>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="(?:res|d)[bwdqt]|times">
+ <token type="KeywordDeclaration"/>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="[a-z$._?][\w$.?#@~]*">
+ <token type="NameFunction"/>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="[\r\n]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="instruction-args">
+ <rule pattern=""(\\"|[^"\n])*"|'(\\'|[^'\n])*'|`(\\`|[^`\n])*`">
+ <token type="LiteralString"/>
+ </rule>
+      <rule pattern="(?:0x[0-9a-f]+|\$0[0-9a-f]*|[0-9]+[0-9a-f]*h)">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-7]+q">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="[01]+b">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="[0-9]+\.e?[0-9]+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule>
+ <include state="punctuation"/>
+ </rule>
+ <rule pattern="r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[a-z$._?][\w$.?#@~]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[\r\n]+">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ </state>
+ <state name="preproc">
+ <rule pattern="[^;\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern=";.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";.*">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,143 @@
+<lexer>
+ <config>
+ <name>Natural</name>
+ <alias>natural</alias>
+ <filename>*.NSN</filename>
+ <filename>*.NSP</filename>
+ <filename>*.NSS</filename>
+ <filename>*.NSH</filename>
+ <filename>*.NSG</filename>
+ <filename>*.NSL</filename>
+ <filename>*.NSA</filename>
+ <filename>*.NSM</filename>
+ <filename>*.NSC</filename>
+ <filename>*.NS7</filename>
+ <mime_type>text/x-natural</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="common">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="^\*.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="variable-names">
+ <rule pattern="[#+]?[\w\-\d]+">
+ <token type="NameVariable"/>
+ </rule>
+      <rule pattern="\([a-zA-Z]\d*\)">
+ <token type="Other"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="common"/>
+ </rule>
+ <rule pattern="(?:END-DEFINE|END-IF|END-FOR|END-SUBROUTINE|END-ERROR|END|IGNORE)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(?:INIT|CONST)\s*<\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(FORM)(\s+)(\w+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(DEFINE)(\s+)(SUBROUTINE)(\s+)([#+]?[\w\-\d]+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(PERFORM)(\s+)([#+]?[\w\-\d]+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(METHOD)(\s+)([\w~]+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s+)([\w\-]+)([=\-]>)([\w\-~]+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ <token type="Operator"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?<=(=|-)>)([\w\-~]+)(?=\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(TEXT)(-)(\d{3})">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Punctuation"/>
+ <token type="LiteralNumberInteger"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(TEXT)(-)(\w{3})">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Punctuation"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
@@ -0,0 +1,123 @@
+<lexer>
+ <config>
+ <name>NDISASM</name>
+ <alias>ndisasm</alias>
+ <mime_type>text/x-disasm</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <priority>0.5</priority> <!-- Lower than NASM -->
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^[0-9A-Za-z]+">
+ <token type="CommentSpecial"/>
+ <push state="offset"/>
+ </rule>
+ </state>
+ <state name="offset">
+ <rule pattern="[0-9A-Za-z]+">
+ <token type="CommentSpecial"/>
+ <push state="assembly"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ </state>
+ <state name="punctuation">
+ <rule pattern="[,():\[\]]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[&|^<>+*/%~-]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[$]+">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="seg|wrt|strict">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="byte|[dq]?word">
+ <token type="KeywordType"/>
+ </rule>
+ </state>
+ <state name="assembly">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="[a-z$._?][\w$.?#@~]*:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="([a-z$._?][\w$.?#@~]*)(\s+)(equ)">
+ <bygroups>
+ <token type="NameConstant"/>
+ <token type="KeywordDeclaration"/>
+ <token type="KeywordDeclaration"/>
+ </bygroups>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|COMMON|CPU|GROUP|UPPERCASE|IMPORT|EXPORT|LIBRARY|MODULE">
+ <token type="Keyword"/>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="(?:res|d)[bwdqt]|times">
+ <token type="KeywordDeclaration"/>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="[a-z$._?][\w$.?#@~]*">
+ <token type="NameFunction"/>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="[\r\n]+">
+ <token type="Text"/>
+ <pop depth="2"/>
+ </rule>
+ </state>
+ <state name="instruction-args">
+ <rule pattern=""(\\"|[^"\n])*"|'(\\'|[^'\n])*'|`(\\`|[^`\n])*`">
+ <token type="LiteralString"/>
+ </rule>
+      <rule pattern="(?:0x[0-9a-f]+|\$0[0-9a-f]*|[0-9]+[0-9a-f]*h)">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-7]+q">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="[01]+b">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="[0-9]+\.e?[0-9]+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule>
+ <include state="punctuation"/>
+ </rule>
+ <rule pattern="r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[a-z$._?][\w$.?#@~]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[\r\n]+">
+ <token type="Text"/>
+ <pop depth="3"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="2"/>
+ </rule>
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";.*">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,121 @@
+<lexer>
+ <config>
+ <name>Newspeak</name>
+ <alias>newspeak</alias>
+ <filename>*.ns2</filename>
+ <mime_type>text/x-newspeak</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\b(Newsqueak2)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="'[^']*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\b(class)(\s+)(\w+)(\s*)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(mixin|self|super|private|public|protected|nil|true|false)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(\w+\:)(\s*)([a-zA-Z_]\w+)">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\w+)(\s*)(=)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="<\w+>">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule>
+ <include state="expressionstat"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ </state>
+ <state name="expressionstat">
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=":\w+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(\w+)(::)">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\w+:">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\(|\)">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\[|\]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\{|\}">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\.|;">
+ <token type="Punctuation"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="literals"/>
+ </rule>
+ </state>
+ <state name="literals">
+ <rule pattern="\$.">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'[^']*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="#'[^']*'">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="#\w+:?">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=""[^"]*"">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,98 @@
+<lexer>
+ <config>
+ <name>Nginx configuration file</name>
+ <alias>nginx</alias>
+ <filename>nginx.conf</filename>
+ <mime_type>text/x-nginx-conf</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(include)(\s+)([^\s;]+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Name"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^\s;#]+">
+ <token type="Keyword"/>
+ <push state="stmt"/>
+ </rule>
+ <rule>
+ <include state="base"/>
+ </rule>
+ </state>
+ <state name="block">
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="2"/>
+ </rule>
+ <rule pattern="[^\s;#]+">
+ <token type="KeywordNamespace"/>
+ <push state="stmt"/>
+ </rule>
+ <rule>
+ <include state="base"/>
+ </rule>
+ </state>
+ <state name="stmt">
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="block"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="base"/>
+ </rule>
+ </state>
+ <state name="base">
+ <rule pattern="#.*\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="on|off">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="\$[^\s;#()]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="([a-z0-9.-]+)(:)([0-9]+)">
+ <bygroups>
+ <token type="Name"/>
+ <token type="Punctuation"/>
+ <token type="LiteralNumberInteger"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-z-]+/[a-z-+]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[0-9]+[km]?\b">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(~)(\s*)([^\s{]+)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="LiteralStringRegex"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[:=~]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[^\s;#{}$]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="/[^\s;#]*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[$;]">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,211 @@
+<lexer>
+ <config>
+ <name>Nim</name>
+ <alias>nim</alias>
+ <alias>nimrod</alias>
+ <filename>*.nim</filename>
+ <filename>*.nimrod</filename>
+ <mime_type>text/x-nim</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="dqs">
+ <rule pattern="\\([\\abcefnrtvl"\']|\n|x[a-f0-9]{2}|[0-9]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ </state>
+ <state name="tdqs">
+ <rule pattern=""""(?!")">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule>
+ <include state="nl"/>
+ </rule>
+ </state>
+ <state name="funcname">
+ <rule pattern="((?![\d_])\w)(((?!_)\w)|(_(?!_)\w))*">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="`.+`">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="int-suffix">
+ <rule pattern="\'(i|u)(32|64)">
+ <token type="LiteralNumberIntegerLong"/>
+ </rule>
+ <rule pattern="\'(u|(i|u)(8|16))">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="float-suffix">
+ <rule pattern="\'(f|d|f(32|64))">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="strings">
+ <rule pattern="(?<!\$)\$(\d+|#|\w+)+">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="[^\\\'"$\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[\'"\\]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="nl">
+ <rule pattern="\n">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="chars">
+ <rule pattern="\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringChar"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralStringChar"/>
+ </rule>
+ </state>
+ <state name="rdqs">
+ <rule pattern=""(?!")">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="""">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ </state>
+ <state name="float-number">
+ <rule pattern="\.(?!\.)[0-9_]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="e[+-]?[0-9][0-9_]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="#\[[\s\S]*?\]#">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="##.*$">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="[*=><+\-/@$~&%!?|\\\[\]]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\.\.|\.|,|\[\.|\.\]|\{\.|\.\}|\(\.|\.\)|\{|\}|\(|\)|:|\^|`|;">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(?:[\w]+)"""">
+ <token type="LiteralString"/>
+ <push state="tdqs"/>
+ </rule>
+ <rule pattern="(?:[\w]+)"">
+ <token type="LiteralString"/>
+ <push state="rdqs"/>
+ </rule>
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <push state="tdqs"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="dqs"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringChar"/>
+ <push state="chars"/>
+ </rule>
+ <rule pattern="(a_?n_?d_?|o_?r_?|n_?o_?t_?|x_?o_?r_?|s_?h_?l_?|s_?h_?r_?|d_?i_?v_?|m_?o_?d_?|i_?n_?|n_?o_?t_?i_?n_?|i_?s_?|i_?s_?n_?o_?t_?)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="(p_?r_?o_?c_?\s)(?![(\[\]])">
+ <token type="Keyword"/>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="(a_?d_?d_?r_?|a_?n_?d_?|a_?s_?|a_?s_?m_?|a_?t_?o_?m_?i_?c_?|b_?i_?n_?d_?|b_?l_?o_?c_?k_?|b_?r_?e_?a_?k_?|c_?a_?s_?e_?|c_?a_?s_?t_?|c_?o_?n_?c_?e_?p_?t_?|c_?o_?n_?s_?t_?|c_?o_?n_?t_?i_?n_?u_?e_?|c_?o_?n_?v_?e_?r_?t_?e_?r_?|d_?e_?f_?e_?r_?|d_?i_?s_?c_?a_?r_?d_?|d_?i_?s_?t_?i_?n_?c_?t_?|d_?i_?v_?|d_?o_?|e_?l_?i_?f_?|e_?l_?s_?e_?|e_?n_?d_?|e_?n_?u_?m_?|e_?x_?c_?e_?p_?t_?|e_?x_?p_?o_?r_?t_?|f_?i_?n_?a_?l_?l_?y_?|f_?o_?r_?|f_?u_?n_?c_?|i_?f_?|i_?n_?|y_?i_?e_?l_?d_?|i_?n_?t_?e_?r_?f_?a_?c_?e_?|i_?s_?|i_?s_?n_?o_?t_?|i_?t_?e_?r_?a_?t_?o_?r_?|l_?e_?t_?|m_?a_?c_?r_?o_?|m_?e_?t_?h_?o_?d_?|m_?i_?x_?i_?n_?|m_?o_?d_?|n_?o_?t_?|n_?o_?t_?i_?n_?|o_?b_?j_?e_?c_?t_?|o_?f_?|o_?r_?|o_?u_?t_?|p_?r_?o_?c_?|p_?t_?r_?|r_?a_?i_?s_?e_?|r_?e_?f_?|r_?e_?t_?u_?r_?n_?|s_?h_?a_?r_?e_?d_?|s_?h_?l_?|s_?h_?r_?|s_?t_?a_?t_?i_?c_?|t_?e_?m_?p_?l_?a_?t_?e_?|t_?r_?y_?|t_?u_?p_?l_?e_?|t_?y_?p_?e_?|w_?h_?e_?n_?|w_?h_?i_?l_?e_?|w_?i_?t_?h_?|w_?i_?t_?h_?o_?u_?t_?|x_?o_?r_?)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(f_?r_?o_?m_?|i_?m_?p_?o_?r_?t_?|i_?n_?c_?l_?u_?d_?e_?)\b">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern="(v_?a_?r)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(i_?n_?t_?|i_?n_?t_?8_?|i_?n_?t_?1_?6_?|i_?n_?t_?3_?2_?|i_?n_?t_?6_?4_?|f_?l_?o_?a_?t_?|f_?l_?o_?a_?t_?3_?2_?|f_?l_?o_?a_?t_?6_?4_?|b_?o_?o_?l_?|c_?h_?a_?r_?|r_?a_?n_?g_?e_?|a_?r_?r_?a_?y_?|s_?e_?q_?|s_?e_?t_?|s_?t_?r_?i_?n_?g_?)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(n_?i_?l_?|t_?r_?u_?e_?|f_?a_?l_?s_?e_?)\b">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="\b_\b">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[0-9][0-9_]*(?=([e.]|\'(f|d|f(32|64))))">
+ <token type="LiteralNumberFloat"/>
+ <push state="float-suffix" state="float-number"/>
+ </rule>
+ <rule pattern="0x[a-f0-9][a-f0-9_]*">
+ <token type="LiteralNumberHex"/>
+ <push state="int-suffix"/>
+ </rule>
+ <rule pattern="0b[01][01_]*">
+ <token type="LiteralNumberBin"/>
+ <push state="int-suffix"/>
+ </rule>
+ <rule pattern="0o[0-7][0-7_]*">
+ <token type="LiteralNumberOct"/>
+ <push state="int-suffix"/>
+ </rule>
+ <rule pattern="[0-9][0-9_]*">
+ <token type="LiteralNumberInteger"/>
+ <push state="int-suffix"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".+$">
+ <token type="Error"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,258 @@
+<lexer>
+ <config>
+ <name>Nix</name>
+ <alias>nixos</alias>
+ <alias>nix</alias>
+ <filename>*.nix</filename>
+ <mime_type>text/x-nix</mime_type>
+ </config>
+ <rules>
+ <state name="space">
+ <rule pattern="[ \t\r\n]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="paren">
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="scope">
+ <rule pattern="}:">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="in(?![a-zA-Z0-9_'-])">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\${">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpol"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ <rule pattern="(=|\?|,)">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ <state name="builtins">
+ <rule pattern="throw(?![a-zA-Z0-9_'-])">
+ <token type="NameException"/>
+ </rule>
+ <rule pattern="(dependencyClosure|fetchTarball|filterSource|currentTime|removeAttrs|baseNameOf|derivation|toString|builtins|getAttr|hasAttr|getEnv|isNull|abort|dirOf|toXML|map)(?![a-zA-Z0-9_'-])">
+ <token type="NameBuiltin"/>
+ </rule>
+ </state>
+ <state name="literals">
+ <rule pattern="(false|true|null)(?![a-zA-Z0-9_'-])">
+ <token type="NameConstant"/>
+ </rule>
+ <rule>
+ <include state="uri"/>
+ </rule>
+ <rule>
+ <include state="path"/>
+ </rule>
+ <rule>
+ <include state="int"/>
+ </rule>
+ <rule>
+ <include state="float"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="import(?![a-zA-Z0-9_'-])">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern="(inherit|assert|with|then|else|rec|if)(?![a-zA-Z0-9_'-])">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="list">
+ <rule pattern="\]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="operators">
+ <rule pattern=" [/-] ">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(\.)(\${)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="LiteralStringInterpol"/>
+ </bygroups>
+ <push state="interpol"/>
+ </rule>
+ <rule pattern="(\?)(\s*)(\${)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="LiteralStringInterpol"/>
+ </bygroups>
+ <push state="interpol"/>
+ </rule>
+ <rule pattern="(&&|>=|<=|\+\+|->|!=|=|\|\||//|==|@|!|\+|\?|<|\.|>|\*)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[;:]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".|\n">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="interpol">
+ <rule pattern="}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="path">
+ <rule pattern="[a-zA-Z0-9._+-]*(/[a-zA-Z0-9._+-]+)+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="~(/[a-zA-Z0-9._+-]+)+/?">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="<[a-zA-Z0-9._+-]+(/[a-zA-Z0-9._+-]+)*>">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="float">
+ <rule pattern="-?(([1-9][0-9]*\.[0-9]*)|(0?\.[0-9]+))([Ee][+-]?[0-9]+)?(?![a-zA-Z0-9_'-])">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="builtins"/>
+ </rule>
+ <rule>
+ <include state="literals"/>
+ </rule>
+ <rule>
+ <include state="operators"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="paren"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="Punctuation"/>
+ <push state="list"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="qstring"/>
+ </rule>
+ <rule pattern="''">
+ <token type="LiteralStringSingle"/>
+ <push state="istring"/>
+ </rule>
+ <rule pattern="{">
+ <token type="Punctuation"/>
+ <push state="scope"/>
+ </rule>
+ <rule pattern="let(?![a-zA-Z0-9_'-])">
+ <token type="Keyword"/>
+ <push state="scope"/>
+ </rule>
+ <rule>
+ <include state="id"/>
+ </rule>
+ <rule>
+ <include state="space"/>
+ </rule>
+ </state>
+ <state name="int">
+ <rule pattern="-?[0-9]+(?![a-zA-Z0-9_'-])">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="uri">
+ <rule pattern="[a-zA-Z][a-zA-Z0-9+.-]*:[a-zA-Z0-9%/?:@&=+$,_.!~*'-]+">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ </state>
+ <state name="qstring">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\${">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpol"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern=".|\n">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="istring">
+ <rule pattern="''\$">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="'''">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="''\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="''">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\${">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpol"/>
+ </rule>
+ <rule pattern="\$.">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern=".|\n">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="id">
+ <rule pattern="[a-zA-Z_][a-zA-Z0-9_'-]*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,59 @@
+<lexer>
+ <config>
+ <name>NSIS</name>
+ <alias>nsis</alias>
+ <alias>nsi</alias>
+ <alias>nsh</alias>
+ <filename>*.nsi</filename>
+ <filename>*.nsh</filename>
+ <mime_type>text/x-nsis</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="([;#].*)(\n)"><bygroups><token type="Comment"/><token type="TextWhitespace"/></bygroups></rule>
+ <rule pattern="'.*?'"><token type="LiteralStringSingle"/></rule>
+ <rule pattern="""><token type="LiteralStringDouble"/><push state="str_double"/></rule>
+ <rule pattern="`"><token type="LiteralStringBacktick"/><push state="str_backtick"/></rule>
+ <rule><include state="macro"/></rule>
+ <rule><include state="interpol"/></rule>
+ <rule><include state="basic"/></rule>
+ <rule pattern="\$\{[a-z_|][\w|]*\}"><token type="KeywordPseudo"/></rule>
+ <rule pattern="/[a-z_]\w*"><token type="NameAttribute"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="[\w.]+"><token type="Text"/></rule>
+ </state>
+ <state name="basic">
+ <rule pattern="(\n)(Function)(\s+)([._a-z][.\w]*)\b"><bygroups><token type="TextWhitespace"/><token type="Keyword"/><token type="TextWhitespace"/><token type="NameFunction"/></bygroups></rule>
+ <rule pattern="\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b"><bygroups><token type="KeywordNamespace"/><token type="Punctuation"/><token type="NameFunction"/></bygroups></rule>
+ <rule pattern="\b([_a-z]\w*)(:)"><bygroups><token type="NameLabel"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="(\b[ULS]|\B)([!<>=]?=|\<\>?|\>)\B"><token type="Operator"/></rule>
+ <rule pattern="[|+-]"><token type="Operator"/></rule>
+ <rule pattern="\\"><token type="Punctuation"/></rule>
@@ -0,0 +1,510 @@
+<lexer>
+ <config>
+ <name>Objective-C</name>
+ <alias>objective-c</alias>
+ <alias>objectivec</alias>
+ <alias>obj-c</alias>
+ <alias>objc</alias>
+ <filename>*.m</filename>
+ <filename>*.h</filename>
+ <mime_type>text/x-objective-c</mime_type>
+ </config>
+ <rules>
+ <state name="macro">
+ <rule pattern="(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="CommentPreprocFile"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^/\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="/[*](.|\n)*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(?<=\\)\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="literal_number">
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="literal_number_inner"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Literal"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="statement"/>
+ </rule>
+ </state>
+ <state name="if0">
+ <rule pattern="^\s*#if.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <push/>
+ </rule>
+ <rule pattern="^\s*#el(?:se|if).*\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="^\s*#endif.*?(?<!\\)\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".*?\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^([-+])(\s*)(\(.*?\))?(\s*)([a-zA-Z$_][\w$]*:?)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <usingself state="root"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ <push state="method"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="function"/>
+ </rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <usingself state="root"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <push state="statement"/>
+ </rule>
+ </state>
+ <state name="statements">
+      <rule pattern="@&quot;">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="@(YES|NO)">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="@(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="@0x[0-9a-fA-F]+[Ll]?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="@0[0-7]+[Ll]?">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="@\d+[Ll]?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="@\(">
+ <token type="Literal"/>
+ <push state="literal_number"/>
+ </rule>
+ <rule pattern="@\[">
+ <token type="Literal"/>
+ <push state="literal_array"/>
+ </rule>
+ <rule pattern="@\{">
+ <token type="Literal"/>
+ <push state="literal_dictionary"/>
+ </rule>
+ <rule pattern="(unsafe_unretained|__bridge_transfer|@autoreleasepool|__autoreleasing|@synchronized|@synthesize|@protected|@selector|@required|@optional|readwrite|@property|nonatomic|@finally|__bridge|@dynamic|__strong|readonly|@private|__block|@public|@encode|release|assign|retain|atomic|@throw|@catch|__weak|setter|getter|typeof|strong|inout|class|@try|@end|weak|copy|out|in)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(instancetype|IBOutlet|IBAction|unichar|Class|BOOL|IMP|SEL|id)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="@(true|false|YES|NO)\n">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(YES|NO|nil|self|super)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(TRUE|FALSE)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(@interface|@implementation)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="#pop" state="oc_classname"/>
+ </rule>
+ <rule pattern="(@class|@protocol)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="#pop" state="oc_forward_classname"/>
+ </rule>
+ <rule pattern="@">
+ <token type="Punctuation"/>
+ </rule>
+      <rule pattern="(L?)(&quot;)">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(L?)(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ <token type="LiteralStringChar"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+[LlUu]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[0-7]+[LlUu]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="\d+[LlUu]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[()\[\],.]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(restricted|volatile|continue|register|default|typedef|struct|extern|switch|sizeof|static|return|union|while|const|break|goto|enum|else|case|auto|for|asm|if|do)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(bool|int|long|float|short|double|char|unsigned|signed|void)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(typename|__inline|restrict|_inline|thread|inline|naked)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(__m(128i|128d|128|64))\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="__(forceinline|identifier|unaligned|declspec|fastcall|finally|stdcall|wchar_t|assume|except|int32|cdecl|int16|leave|based|raise|int64|noop|int8|w64|try|asm)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false|NULL)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(:)(?!:)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="method">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\.\.\.">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z$_][\w$]*:">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="function"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="literal_array">
+ <rule pattern="\[">
+ <token type="Punctuation"/>
+ <push state="literal_array_inner"/>
+ </rule>
+ <rule pattern="\]">
+ <token type="Literal"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="statement"/>
+ </rule>
+ </state>
+ <state name="oc_classname">
+ <rule pattern="([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="#pop" state="oc_ivars"/>
+ </rule>
+ <rule pattern="([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="NameLabel"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="#pop" state="oc_ivars"/>
+ </rule>
+ <rule pattern="([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([a-zA-Z$_][\w$]*)(\s*)(\{)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="#pop" state="oc_ivars"/>
+ </rule>
+ <rule pattern="([a-zA-Z$_][\w$]*)">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="function">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="^#if\s+0">
+ <token type="CommentPreproc"/>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^#">
+ <token type="CommentPreproc"/>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="if0"/>
+ </rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(\n|[\w\W]*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="literal_number_inner">
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="statement"/>
+ </rule>
+ </state>
+ <state name="statement">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="oc_forward_classname">
+ <rule pattern="([a-zA-Z$_][\w$]*)(\s*,\s*)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="oc_forward_classname"/>
+ </rule>
+ <rule pattern="([a-zA-Z$_][\w$]*)(\s*;?)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="literal_array_inner">
+ <rule pattern="\[">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="statement"/>
+ </rule>
+ </state>
+ <state name="literal_dictionary">
+ <rule pattern="\}">
+ <token type="Literal"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="statement"/>
+ </rule>
+ </state>
+ <state name="oc_ivars">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string">
+      <rule pattern="&quot;">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+      <rule pattern="\\([\\abfnrtv&quot;\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+      <rule pattern="[^\\&quot;\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,145 @@
+<lexer>
+ <config>
+ <name>ObjectPascal</name>
+ <alias>objectpascal</alias>
+ <filename>*.pas</filename>
+ <filename>*.pp</filename>
+ <filename>*.inc</filename>
+ <filename>*.dpr</filename>
+ <filename>*.dpk</filename>
+ <filename>*.lpr</filename>
+ <filename>*.lpk</filename>
+ <mime_type>text/x-pascal</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <!-- TextWhitespace -->
+ <rule pattern="[^\S\n]+">
+ <token type="TextWhitespace" />
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <!-- Magic Number (BOM) -->
+ <rule pattern="[^\u0000-\u007F]+">
+ <token type="Text"/>
+ </rule>
+ <!-- Compiler Directive -->
+ <rule pattern="\{[$].*?\}|\{[-](NOD|EXT|OBJ).*?\}|\([*][$].*?[*]\)">
+ <token type="CommentPreproc" />
+ </rule>
+ <!-- Comment Single -->
+ <rule pattern="(//.*?)(\n)">
+ <bygroups>
+ <token type="CommentSingle" />
+ <token type="TextWhitespace" />
+ </bygroups>
+ </rule>
+ <!-- Comment Multiline Block -->
+ <rule pattern="\([*](.|\n)*?[*]\)">
+ <token type="CommentMultiline"/>
+ </rule>
+ <!-- Comment Multiline Source Documentation -->
+ <rule pattern="[{](.|\n)*?[}]">
+ <token type="CommentMultiline"/>
+ </rule>
+ <!-- Range Indicator -->
+ <rule pattern="(?i:(\.\.))">
+ <token type="Operator" />
+ </rule>
+ <!-- Control Character -->
+ <rule pattern="[\#][0-9a-fA-F]*|[0-9]+[xX][0-9a-fA-F]*">
+ <token type="LiteralStringEscape" />
+ </rule>
+ <!-- Numbers -->
+ <rule pattern="[\$][0-9a-fA-F]*[xX][0-9a-fA-F]*|[\$][0-9a-fA-F]*|([0-9]+[0-9a-fA-F]+(?=[hH]))">
+ <token type="LiteralNumberHex" />
+ </rule>
+ <rule pattern="[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*[eE][+-]?[0-9]+(\'[0-9]+)*|[0-9]+(\'[0-9]+)*\.[0-9]+(\'[0-9]+)*|\d+[eE][+-]?[0-9]+">
+ <token type="LiteralNumberFloat" />
+ </rule>
+ <rule pattern="0|[1-9][0-9_]*?">
+ <token type="LiteralNumberInteger" />
+ </rule>
+ <!-- Multiline string Literal -->
+ <rule pattern="('''\s*\n)(.|\n)*?(''')(?=\s*;)">
+ <token type="LiteralString" />
+ </rule>
+ <!-- string -->
+ <rule pattern="(?i:(\')).*?(?i:(\'))">
+ <token type="LiteralString" />
+ </rule>
+ <!-- string (Special case for Delphi Assembler)-->
+      <rule pattern="(?i:(&quot;)).*?(?i:(&quot;))">
+ <token type="LiteralString" />
+ </rule>
+ <!-- Simple Types -->
+ <rule pattern="\b(?!=\.)(?i:(NativeInt|NativeUInt|LongInt|LongWord|Integer|Int64|Cardinal|UInt64|ShortInt|SmallInt|FixedInt|Byte|Word|FixedUInt|Int8|Int16|Int32|UInt8|UInt16|UInt32|Real48|Single|Double|Real|Extended|Comp|Currency|Char|AnsiChar|WideChar|UCS2Char|UCS4Char|string|ShortString|AnsiString|UnicodeString|WideString|RawByteString|UTF8String|File|TextFile|Text|Boolean|ByteBool|WordBool|LongBool|Pointer|Variant|OleVariant))\b(?![<\/(])">
+ <token type="KeywordType" />
+ </rule>
+ <!-- T Types -->
+ <rule pattern="\b(?!=\.)(?i:(TSingleRec|TDoubleRec|TExtended80Rec|TByteArray|TTextBuf|TVarRec|TWordArray))\b(?![<\/(])">
+ <token type="KeywordType" />
+ </rule>
+ <!-- Pointer Types -->
+ <rule pattern="\b(?!=\.)(?i:(PChar|PAnsiChar|PWideChar|PRawByteString|PUnicodeString|PString|PAnsiString|PShortString|PTextBuf|PWideString|PByte|PShortInt|PWord|PSmallInt|PCardinal|PLongWord|PFixedUInt|PLongint|PFixedInt|PUInt64|PInt64|PNativeUInt|PNativeInt|PByteArray|PCurrency|PDouble|PExtended|PSingle|PInteger|POleVariant|PVarRec|PVariant|PWordArray|PBoolean|PWordBool|PLongBool|PPointer))\b(?![<\/(])">
+ <token type="KeywordType" />
+ </rule>
+ <!-- More Types -->
+ <rule pattern="\b(?!=\.)(?i:(IntPtr|UIntPtr|Float32|Float64|_ShortStr|_ShortString|_AnsiStr|_AnsiString|_AnsiChr|_AnsiChar|_WideStr|_WideString|_PAnsiChr|_PAnsiChar|UTF8Char|_AnsiChar|PUTF8Char|_PAnsiChar|MarshaledString|MarshaledAString))\b(?![<\/(])">
+ <token type="KeywordType" />
+ </rule>
+ <!-- Result -->
+ <rule pattern="\b(?!=\.)(?i:(Result))\b(?![<\/(])">
+ <token type="GenericEmph" />
+ </rule>
+ <!-- Result Constants -->
+ <rule pattern="\b(?!=\.)(?i:(True|False))\b(?![<\/(])">
+ <token type="NameConstant" />
+ </rule>
+ <!-- Operator (Assign) -->
+ <rule pattern="[(\:\=)]">
+ <token type="Operator" />
+ </rule>
+ <!-- Operators (Arithmetic, Unary Arithmetic, String, Pointer, Set, Relational, Address) -->
+ <rule pattern="[\+\-\*\/\^<>\=\@]">
+ <token type="Operator" />
+ </rule>
+ <!-- Operators (Arithmetic, Boolean, Logical (Bitwise), Set) -->
+      <rule pattern="\b(?i:(div|mod|not|and|or|xor|shl|shr|in))\b">
+ <token type="OperatorWord" />
+ </rule>
+ <!-- Special Symbols (Escape, Literal Chr, Hex Value, Binary Numeral Expression Indicator) -->
+ <rule pattern="[&\#\$\%]">
+ <token type="Operator" />
+ </rule>
+ <!-- Special Symbols (Punctuation) -->
+ <rule pattern="[\(\)\,\.\:\;\[\]]">
+ <token type="Punctuation" />
+ </rule>
+ <!-- Reserved Words -->
+ <rule pattern="\b(?!=\.)(?i:(and|end|interface|record|var|array|except|is|repeat|while|as|exports|label|resourcestring|with|asm|file|library|set|xor|begin|finalization|mod|shl|case|finally|nil|shr|class|for|not|string|const|function|object|then|constructor|goto|of|threadvar|destructor|if|or|to|dispinterface|implementation|packed|try|div|in|procedure|type|do|inherited|program|unit|downto|initialization|property|until|else|inline|raise|uses))\b(?![<\/(])">
+ <token type="KeywordReserved" />
+ </rule>
+ <!-- Directives -->
+ <rule pattern="\b(?!=\.)(?i:(absolute|export|name|public|stdcall|abstract|external|published|strict|assembler|nodefault|read|stored|automated|final|operator|readonly|unsafe|cdecl|forward|out|reference|varargs|contains|helper|overload|register|virtual|default|implements|override|reintroduce|winapi|delayed|index|package|requires|write|deprecated|inline|pascal|writeonly|dispid|library|platform|safecall|dynamic|local|private|sealed|experimental|message|protected|static))\b(?![<\/(])">
+ <token type="Keyword" />
+ </rule>
+ <!-- Directives obsolete -->
+ <rule pattern="\b(?!=\.)(?i:(near|far|resident))\b(?![<\/(])">
+ <token type="Keyword" />
+ </rule>
+ <!-- Constant Expressions -->
+ <rule pattern="\b(?!=\.)(?i:(Abs|High|Low|Pred|Succ|Chr|Length|Odd|Round|Swap|Hi|Lo|Ord|SizeOf|Trunc))\b(?![<\/(])">
+ <token type="KeywordConstant" />
+ </rule>
+ <!-- everything else -->
+ <rule pattern="([^\W\d]|\$)[\w$]*">
+ <token type="Text" />
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,145 @@
+<lexer>
+ <config>
+ <name>OCaml</name>
+ <alias>ocaml</alias>
+ <filename>*.ml</filename>
+ <filename>*.mli</filename>
+ <filename>*.mll</filename>
+ <filename>*.mly</filename>
+ <mime_type>text/x-ocaml</mime_type>
+ </config>
+ <rules>
+ <state name="escape-sequence">
+      <rule pattern="\\[\\&quot;\'ntbr]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\[0-9]{3}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\x[0-9a-fA-F]{2}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="false|true|\(\)|\[\]">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="\b([A-Z][\w\']*)(?=\s*\.)">
+ <token type="NameNamespace"/>
+ <push state="dotted"/>
+ </rule>
+ <rule pattern="\b([A-Z][\w\']*)">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="\(\*(?![)])">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="\b(as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|false|for|fun|function|functor|if|in|include|inherit|initializer|lazy|let|match|method|module|mutable|new|object|of|open|private|raise|rec|sig|struct|then|to|true|try|type|value|val|virtual|when|while|with)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(~|\}|\|]|\||\{<|\{|`|_|]|\[\||\[>|\[<|\[|\?\?|\?|>\}|>]|>|=|<-|<|;;|;|:>|:=|::|:|\.\.|\.|->|-\.|-|,|\+|\*|\)|\(|&&|&|#|!=)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b(and|asr|land|lor|lsl|lxor|mod|or)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="\b(unit|int|float|bool|string|char|list|array)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="[^\W\d][\w']*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[xX][\da-fA-F][\da-fA-F_]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[oO][0-7][0-7_]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[bB][01][01_]*">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="\d[\d_]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+      <rule pattern="'(?:(\\[\\\&quot;'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="'.'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="'">
+ <token type="Keyword"/>
+ </rule>
+      <rule pattern="&quot;">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="[~?][a-z][\w\']*:">
+ <token type="NameVariable"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^(*)]+">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\(\*">
+ <token type="Comment"/>
+ <push/>
+ </rule>
+ <rule pattern="\*\)">
+ <token type="Comment"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[(*)]">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="string">
+      <rule pattern="[^\\&quot;]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule>
+ <include state="escape-sequence"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralStringDouble"/>
+ </rule>
+      <rule pattern="&quot;">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="dotted">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[A-Z][\w\']*(?=\s*\.)">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="[A-Z][\w\']*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[a-z_][\w\']*">
+ <token type="Name"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,101 @@
+<lexer>
+ <config>
+ <name>Octave</name>
+ <alias>octave</alias>
+ <filename>*.m</filename>
+ <mime_type>text/octave</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="[%#].*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="^\s*function">
+ <token type="Keyword"/>
+ <push state="deffunc"/>
+ </rule>
+ <rule pattern="(unwind_protect_cleanup|end_unwind_protect|unwind_protect|end_try_catch|endproperties|endclassdef|endfunction|persistent|properties|endmethods|otherwise|endevents|endswitch|__FILE__|continue|classdef|__LINE__|endwhile|function|methods|elseif|return|static|events|global|endfor|switch|until|endif|while|catch|break|case|else|set|end|try|for|get|do|if)\b">
+ <token type="Keyword"/>
+ </rule>
@@ -0,0 +1,113 @@
+<lexer>
+ <config>
+ <name>Odin</name>
+ <alias>odin</alias>
+ <filename>*.odin</filename>
+ <mime_type>text/odin</mime_type>
+ </config>
+ <rules>
+ <state name="NestedComment">
+ <rule pattern = "/[*]">
+ <token type = "CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern = "[*]/">
+ <token type = "CommentMultiline"/>
+ <pop depth = "1"/>
+ </rule>
+ <rule pattern = "[\s\S]">
+ <token type = "CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern = "\n">
+ <token type = "TextWhitespace"/>
+ </rule>
+ <rule pattern = "\s+">
+ <token type = "TextWhitespace"/>
+ </rule>
+ <rule pattern = "//.*?\n">
+ <token type = "CommentSingle"/>
+ </rule>
+ <rule pattern = "/[*]">
+ <token type = "CommentMultiline"/>
+ <push state="NestedComment"/>
+ </rule>
+ <rule pattern = "(import|package)\b">
+ <token type = "KeywordNamespace"/>
+ </rule>
+ <rule pattern = "(proc|struct|map|enum|union)\b">
+ <token type = "KeywordDeclaration"/>
+ </rule>
+ <rule pattern = "(asm|auto_cast|bit_set|break|case|cast|context|continue|defer|distinct|do|dynamic|else|enum|fallthrough|for|foreign|if|import|in|map|not_in|or_else|or_return|package|proc|return|struct|switch|transmute|typeid|union|using|when|where|panic|real|imag|len|cap|append|copy|delete|new|make|clearpanic|real|imag|len|cap|append|copy|delete|new|make|clear)\b">
+ <token type = "Keyword"/>
+ </rule>
+ <rule pattern = "(true|false|nil)\b">
+ <token type = "KeywordConstant"/>
+ </rule>
+ <rule pattern = "(uint|u8|u16|u32|u64|int|i8|i16|i32|i64|i16le|i32le|i64le|i128le|u16le|u32le|u64le|u128le|i16be|i32be|i64be|i128be|u16be|u32be|u64be|u128be|f16|f32|f64|complex32|complex64|complex128|quaternion64|quaternion128|quaternion256|byte|rune|string|cstring|typeid|any|bool|b8|b16|b32|b64|uintptr|rawptr)\b">
+ <token type = "KeywordType"/>
+ </rule>
+ <rule pattern = "\#[a-zA-Z_]+\b">
+ <token type = "NameDecorator"/>
+ </rule>
+ <rule pattern = "\@(\([a-zA-Z_]+\b\s*.*\)|\(?[a-zA-Z_]+\)?)">
+ <token type = "NameAttribute"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(\s*)(\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="[^\W\d]\w*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern = "\d+i">
+ <token type = "LiteralNumber"/>
+ </rule>
+ <rule pattern = "\d+\.\d*([Ee][-+]\d+)?i">
+ <token type = "LiteralNumber"/>
+ </rule>
+ <rule pattern = "\.\d+([Ee][-+]\d+)?i">
+ <token type = "LiteralNumber"/>
+ </rule>
+ <rule pattern = "\d+[Ee][-+]\d+i">
+ <token type = "LiteralNumber"/>
+ </rule>
+ <rule pattern = "\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
+ <token type = "LiteralNumberFloat"/>
+ </rule>
+ <rule pattern = "\.\d+([eE][+\-]?\d+)?">
+ <token type = "LiteralNumberFloat"/>
+ </rule>
+ <rule pattern = "0o[0-7]+">
+ <token type = "LiteralNumberOct"/>
+ </rule>
+ <rule pattern = "0x[0-9a-fA-F_]+">
+ <token type = "LiteralNumberHex"/>
+ </rule>
+ <rule pattern = "0b[01_]+">
+ <token type = "LiteralNumberBin"/>
+ </rule>
+ <rule pattern = "(0|[1-9][0-9_]*)">
+ <token type = "LiteralNumberInteger"/>
+ </rule>
+      <rule pattern = "'(\\['&quot;\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'" >
+ <token type = "LiteralStringChar"/>
+ </rule>
+ <rule pattern = "(`)([^`]*)(`)" >
+ <token type = "LiteralString"/>
+ </rule>
+      <rule pattern = "&quot;(\\\\|\\&quot;|[^&quot;])*&quot;" >
+ <token type = "LiteralString"/>
+ </rule>
+ <rule pattern = "(<<=|>>=|<<|>>|<=|>=|&=|&|\+=|-=|\*=|/=|%=|\||\^|=|&&|\|\||--|->|=|==|!=|:=|:|::|\.\.\<|\.\.=|[<>+\-*/%&])" >
+ <token type = "Operator"/>
+ </rule>
+ <rule pattern="[{}()\[\],.;]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,92 @@
+<lexer>
+ <config>
+ <name>OnesEnterprise</name>
+ <alias>ones</alias>
+ <alias>onesenterprise</alias>
+ <alias>1S</alias>
+ <alias>1S:Enterprise</alias>
+ <filename>*.EPF</filename>
+ <filename>*.epf</filename>
+ <filename>*.ERF</filename>
+ <filename>*.erf</filename>
+ <mime_type>application/octet-stream</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(.*?)\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(#область|#region|#конецобласти|#endregion|#если|#if|#иначе|#else|#конецесли|#endif).*">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(&наклиенте|&atclient|&насервере|&atserver|&насерверебезконтекста|&atservernocontext|&наклиентенасерверебезконтекста|&atclientatservernocontext).*">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(>=|<=|<>|\+|-|=|>|<|\*|/|%)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(;|,|\)|\(|\.)">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(истина|ложь|или|false|true|не|and|not|и|or)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(иначеесли|конецесли|иначе|тогда|если|elsif|endif|else|then|if)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(конеццикла|каждого|цикл|пока|для|while|enddo|по|each|из|for|do|in|to)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(продолжить|прервать|возврат|перейти|continue|return|break|goto)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(конецпроцедуры|конецфункции|процедура|функция|endprocedure|endfunction|procedure|function)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(экспорт|новый|перем|знач|export|new|val|var)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(вызватьисключение|конецпопытки|исключение|попытка|endtry|except|raise|try)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(выполнить|вычислить|execute|eval)\b">
+ <token type="Keyword"/>
+ </rule>
+      <rule pattern="&quot;">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="[_а-яА-Я0-9][а-яА-Я0-9]*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[_\w][\w]*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="string">
+      <rule pattern="&quot;&quot;">
+ <token type="LiteralString"/>
+ </rule>
+      <rule pattern="&quot;C?">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+      <rule pattern="[^&quot;]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,101 @@
+<lexer>
+ <config>
+ <name>OpenEdge ABL</name>
+ <alias>openedge</alias>
+ <alias>abl</alias>
+ <alias>progress</alias>
+ <alias>openedgeabl</alias>
+ <filename>*.p</filename>
+ <filename>*.cls</filename>
+ <filename>*.w</filename>
+ <filename>*.i</filename>
+ <mime_type>text/x-openedge</mime_type>
+ <mime_type>application/x-openedge</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="//.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="CommentPreproc"/>
+ <push state="preprocessor"/>
+ </rule>
+ <rule pattern="\s*&.*">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+[LlUu]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(?i)(DEFINE|DEF|DEFI|DEFIN)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(?i)(^|(?<=[^\w\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|COM-HANDLE|DATE|DATETIME|DATETIME-TZ|DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|INT64|INTEGER|INT|INTE|INTEG|INTEGE|LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^\w\-]))">
+ <token type="KeywordType"/>
+ </rule>
@@ -0,0 +1,96 @@
+<lexer>
+ <config>
+ <name>OpenSCAD</name>
+ <alias>openscad</alias>
+ <filename>*.scad</filename>
+ <mime_type>text/x-scad</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//(\n|[\w\W]*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="[{}\[\]\(\),;:]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[*!#%\-+=?/]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="<|<=|==|!=|>=|>|&&|\|\|">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\$(f[asn]|t|vp[rtd]|children)">
+ <token type="NameVariableMagic"/>
+ </rule>
+ <rule pattern="(undef|PI)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(use|include)((?:\s|\\\\s)+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="includes"/>
+ </rule>
+ <rule pattern="(module)(\s*)([^\s\(]+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(function)(\s*)([^\s\(]+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(true|false)\b">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="\b(function|module|include|use|for|intersection_for|if|else|return)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(circle|square|polygon|text|sphere|cube|cylinder|polyhedron|translate|rotate|scale|resize|mirror|multmatrix|color|offset|hull|minkowski|union|difference|intersection|abs|sign|sin|cos|tan|acos|asin|atan|atan2|floor|round|ceil|ln|log|pow|sqrt|exp|rands|min|max|concat|lookup|str|chr|search|version|version_num|norm|cross|parent_module|echo|import|import_dxf|dxf_linear_extrude|linear_extrude|rotate_extrude|surface|projection|render|dxf_cross|dxf_dim|let|assign|len)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="\bchildren\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="-?\d+(\.\d+)?(e[+-]?\d+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="includes">
+ <rule pattern="(<)([^>]*)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="CommentPreprocFile"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,329 @@
+<lexer>
+ <config>
+ <name>Org Mode</name>
+ <alias>org</alias>
+ <alias>orgmode</alias>
+ <filename>*.org</filename>
+ <mime_type>text/org</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^# .*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="^(\*)( COMMENT)( .*)$">
+ <bygroups>
+ <token type="GenericHeading"/>
+ <token type="NameEntity"/>
+ <token type="GenericStrong"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\*\*+)( COMMENT)( .*)$">
+ <bygroups>
+ <token type="GenericSubheading"/>
+ <token type="NameEntity"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\*)( DONE)( .*)$">
+ <bygroups>
+ <token type="GenericHeading"/>
+ <token type="LiteralStringRegex"/>
+ <token type="GenericStrong"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\*\*+)( DONE)( .*)$">
+ <bygroups>
+ <token type="GenericSubheading"/>
+ <token type="LiteralStringRegex"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\*)( TODO)( .*)$">
+ <bygroups>
+ <token type="GenericHeading"/>
+ <token type="Error"/>
+ <token type="GenericStrong"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\*\*+)( TODO)( .*)$">
+ <bygroups>
+ <token type="GenericSubheading"/>
+ <token type="Error"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\*)( .+?)( :[a-zA-Z0-9_@:]+:)$">
+ <bygroups>
+ <token type="GenericHeading"/>
+ <token type="GenericStrong"/>
+ <token type="GenericEmph"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\*)( .+)$">
+ <bygroups>
+ <token type="GenericHeading"/>
+ <token type="GenericStrong"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\*\*+)( .+?)( :[a-zA-Z0-9_@:]+:)$">
+ <bygroups>
+ <token type="GenericSubheading"/>
+ <token type="Text"/>
+ <token type="GenericEmph"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\*\*+)( .+)$">
+ <bygroups>
+ <token type="GenericSubheading"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^( *)([+-] )(\[[ X]\])( .+)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Keyword"/>
+ <usingself state="inline"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^( +)(\* )(\[[ X]\])( .+)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Keyword"/>
+ <usingself state="inline"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^( *)([+-] )([^ \n]+ ::)( .+)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Keyword"/>
+ <usingself state="inline"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^( +)(\* )([^ \n]+ ::)( .+)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Keyword"/>
+ <usingself state="inline"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^( *)([+-] )(.+)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <usingself state="inline"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^( +)(\* )(.+)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <usingself state="inline"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^( *)([0-9]+[.)])( \[@[0-9]+\])( .+)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="GenericEmph"/>
+ <usingself state="inline"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^( *)([0-9]+[.)])( .+)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <usingself state="inline"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?i)^( *#\+begin: )([^ ]+)([\w\W]*?\n)([\w\W]*?)(^ *#\+end: *$)">
+ <bygroups>
+ <token type="Comment"/>
+ <token type="CommentSpecial"/>
+ <token type="Comment"/>
+ <usingself state="inline"/>
+ <token type="Comment"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?i)^( *#\+begin_comment *\n)([\w\W]*?)(^ *#\+end_comment *$)">
+ <bygroups>
+ <token type="Comment"/>
+ <token type="Comment"/>
+ <token type="Comment"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?i)^( *#\+begin_src )([^ \n]+)(.*?\n)([\w\W]*?)(^ *#\+end_src *$)">
+ <usingbygroup>
+ <sublexer_name_group>2</sublexer_name_group>
+ <code_group>4</code_group>
+ <emitters>
+ <token type="Comment"/>
+ <token type="CommentSpecial"/>
+ <token type="Comment"/>
+ <token type="Text"/>
+ <token type="Comment"/>
+ </emitters>
+ </usingbygroup>
+ </rule>
+ <rule pattern="(?i)^( *#\+begin_export )(\w+)( *\n)([\w\W]*?)(^ *#\+end_export *$)">
+ <usingbygroup>
+ <sublexer_name_group>2</sublexer_name_group>
+ <code_group>4</code_group>
+ <emitters>
+ <token type="Comment"/>
+ <token type="CommentSpecial"/>
+ <token type="Text"/>
+ <token type="Text"/>
+ <token type="Comment"/>
+ </emitters>
+ </usingbygroup>
+ </rule>
+ <rule pattern="(?i)^( *#\+begin_)(\w+)( *\n)([\w\W]*?)(^ *#\+end_\2)( *$)">
+ <bygroups>
+ <token type="Comment"/>
+ <token type="Comment"/>
+ <token type="Text"/>
+ <token type="Text"/>
+ <token type="Comment"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(#\+\w+)(:.*)$">
+ <bygroups>
+ <token type="CommentSpecial"/>
+ <token type="Comment"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?i)^( *:\w+: *\n)([\w\W]*?)(^ *:end: *$)">
+ <bygroups>
+ <token type="Comment"/>
+ <token type="CommentSpecial"/>
+ <token type="Comment"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(.*)(\\\\)$">
+ <bygroups>
+ <usingself state="inline"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?i)^( *(?:DEADLINE|SCHEDULED): )(<[^<>]+?> *)$">
+ <bygroups>
+ <token type="Comment"/>
+ <token type="CommentSpecial"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?i)^( *CLOSED: )(\[[^][]+?\] *)$">
+ <bygroups>
+ <token type="Comment"/>
+ <token type="CommentSpecial"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="inline"/>
+ </rule>
+ </state>
+ <state name="inline">
+ <rule pattern="(\s*)(\*[^ \n*][^*]+?[^ \n*]\*)((?=\W|\n|$))">
+ <bygroups>
+ <token type="Text"/>
+ <token type="GenericStrong"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(/[^/]+?/)((?=\W|\n|$))">
+ <bygroups>
+ <token type="Text"/>
+ <token type="GenericEmph"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(=[^\n=]+?=)((?=\W|\n|$))">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(~[^\n~]+?~)((?=\W|\n|$))">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(\+[^+]+?\+)((?=\W|\n|$))">
+ <bygroups>
+ <token type="Text"/>
+ <token type="GenericDeleted"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\s*)(_[^_]+?_)((?=\W|\n|$))">
+ <bygroups>
+ <token type="Text"/>
+ <token type="GenericUnderline"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<)([^<>]+?)(>)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[{]{3}[^}]+[}]{3}">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="([^[])(\[fn:)([^]]+?)(\])([^]])">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameBuiltinPseudo"/>
+ <token type="LiteralString"/>
+ <token type="NameBuiltinPseudo"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\[\[)([^][]+?)(\]\[)([^][]+)(\]\])">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="NameTag"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\[\[)([^][]+?)(\]\])">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<<)([^<>]+?)(>>)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^( *)(\|[ -].*?[ -]\|)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,37 @@
+<lexer>
+ <config>
+ <name>PacmanConf</name>
+ <alias>pacmanconf</alias>
+ <filename>pacman.conf</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="^\s*\[.*?\]\s*$">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(\w+)(\s*)(=)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)(\w+)(\s*)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\$repo|\$arch|%o|%u)\b">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,400 @@
+<lexer>
+ <config>
+ <name>Perl</name>
+ <alias>perl</alias>
+ <alias>pl</alias>
+ <filename>*.pl</filename>
+ <filename>*.pm</filename>
+ <filename>*.t</filename>
+ <mime_type>text/x-perl</mime_type>
+ <mime_type>application/x-perl</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\A\#!.+?$">
+ <token type="CommentHashbang"/>
+ </rule>
+ <rule pattern="\#.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="^=[a-zA-Z0-9]+\s+.*?\n=cut">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="(continue|foreach|unless|return|elsif|CHECK|while|BEGIN|reset|print|until|next|else|INIT|then|last|redo|case|our|new|for|END|if|do|my)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(format)(\s+)(\w+)(\s*)(=)(\s*\n)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Name"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="format"/>
+ </rule>
+ <rule pattern="(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="s/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="s!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="s\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="s@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="s%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="s\{(\\\\|\\[^\\]|[^\\}])*\}\s*">
+ <token type="LiteralStringRegex"/>
+ <push state="balanced-regex"/>
+ </rule>
+ <rule pattern="s<(\\\\|\\[^\\]|[^\\>])*>\s*">
+ <token type="LiteralStringRegex"/>
+ <push state="balanced-regex"/>
+ </rule>
+ <rule pattern="s\[(\\\\|\\[^\\]|[^\\\]])*\]\s*">
+ <token type="LiteralStringRegex"/>
+ <push state="balanced-regex"/>
+ </rule>
+ <rule pattern="s\((\\\\|\\[^\\]|[^\\)])*\)\s*">
+ <token type="LiteralStringRegex"/>
+ <push state="balanced-regex"/>
+ </rule>
+ <rule pattern="m?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="m(?=[/!\\{<\[(@%$])">
+ <token type="LiteralStringRegex"/>
+ <push state="balanced-regex"/>
+ </rule>
+ <rule pattern="((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
@@ -0,0 +1,212 @@
+<lexer>
+ <config>
+ <name>PHP</name>
+ <alias>php</alias>
+ <alias>php3</alias>
+ <alias>php4</alias>
+ <alias>php5</alias>
+ <filename>*.php</filename>
+ <filename>*.php[345]</filename>
+ <filename>*.inc</filename>
+ <mime_type>text/x-php</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <dot_all>true</dot_all>
+ <ensure_nl>true</ensure_nl>
+ <priority>3</priority>
+ </config>
+ <rules>
+ <state name="magicfuncs">
+ <rule pattern="(__callStatic|__set_state|__construct|__debugInfo|__toString|__destruct|__invoke|__wakeup|__clone|__sleep|__isset|__unset|__call|__get|__set)\b">
+ <token type="NameFunctionMagic"/>
+ </rule>
+ </state>
+ <state name="magicconstants">
+ <rule pattern="(__NAMESPACE__|__FUNCTION__|__METHOD__|__CLASS__|__TRAIT__|__LINE__|__FILE__|__DIR__)\b">
+ <token type="NameConstant"/>
+ </rule>
+ </state>
+ <state name="classname">
+ <rule pattern="(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="functionname">
+ <rule>
+ <include state="magicfuncs"/>
+ </rule>
+ <rule pattern="(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^{$"\\]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="(\{\$\{)(.*?)(\}\})">
+ <bygroups>
+ <token type="LiteralStringInterpol"/>
+ <usingself state="root"/>
+ <token type="LiteralStringInterpol"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{)(\$.*?)(\})">
+ <bygroups>
+ <token type="LiteralStringInterpol"/>
+ <usingself state="root"/>
+ <token type="LiteralStringInterpol"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\$\{)(\S+)(\})">
+ <bygroups>
+ <token type="LiteralStringInterpol"/>
+ <token type="NameVariable"/>
+ <token type="LiteralStringInterpol"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[${\\]">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\?>">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="LiteralString"/>
+ <token type="LiteralStringDelimiter"/>
+ <token type="LiteralString"/>
+ <token type="LiteralStringDelimiter"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\*\*.*?\*/">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="NameAttribute"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[~!%^&*+=|:.<>/@-]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[\[\]{}();,]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(class)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="(function)(\s*)(?=\()">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(function)(\s+)(&?)(\s*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="functionname"/>
+ </rule>
+ <rule pattern="(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameConstant"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule>
+ <include state="magicconstants"/>
+ </rule>
+ <rule pattern="\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+e[+-]?[0-9]+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0x[a-f0-9_]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d[\d_]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="0b[01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="'([^'\\]*(?:\\.[^'\\]*)*)'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="`([^`\\]*(?:\\.[^`\\]*)*)`">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,105 @@
+<lexer>
+ <config>
+ <name>Pig</name>
+ <alias>pig</alias>
+ <filename>*.pig</filename>
+ <mime_type>text/x-pig</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="--.*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="/\*[\w\W]*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="types"/>
+ </rule>
+ <rule>
+ <include state="builtins"/>
+ </rule>
+ <rule>
+ <include state="punct"/>
+ </rule>
+ <rule>
+ <include state="operators"/>
+ </rule>
+ <rule pattern="[0-9]*\.[0-9]+(e[0-9]+)?[fd]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+L?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="([a-z_]\w*)(\s*)(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[()#:]">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[^(:#\'")\s]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\S+\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|%declare|%default|define|dense|desc|describe|distinct|du|dump|eval|exex|explain|filter|flatten|foreach|full|generate|group|help|if|illustrate|import|inner|input|into|is|join|kill|left|limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|outer|output|parallel|pig|pwd|quit|register|returns|right|rm|rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|stream|through|union|using|void)\b">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="builtins">
+ <rule pattern="(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|TOKENIZE)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ </state>
+ <state name="types">
+ <rule pattern="(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|int|long|tuple)\b">
+ <token type="KeywordType"/>
+ </rule>
+ </state>
+ <state name="punct">
+ <rule pattern="[;(){}\[\]]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="operators">
+ <rule pattern="[#=,./%+\-?]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(eq|gt|lt|gte|lte|neq|matches)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(==|<=|<|>=|>|!=)">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,73 @@
+<lexer>
+ <config>
+ <name>PkgConfig</name>
+ <alias>pkgconfig</alias>
+ <filename>*.pc</filename>
+ </config>
+ <rules>
+ <state name="curly">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="NameAttribute"/>
+ </rule>
+ </state>
+ <state name="spvalue">
+ <rule>
+ <include state="interp"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^${}#\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="^(\w+)(=)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^([\w.]+)(:)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="spvalue"/>
+ </rule>
+ <rule>
+ <include state="interp"/>
+ </rule>
+ <rule pattern="[^${}#=:\n.]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="interp">
+ <rule pattern="\$\$">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="curly"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,119 @@
+<lexer>
+ <config>
+ <name>PL/pgSQL</name>
+ <alias>plpgsql</alias>
+ <mime_type>text/x-plpgsql</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\%[a-z]\w*\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern=":=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\<\<[a-z]\w*\>\>">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="\#[a-z]\w*\b">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="--.*\n?">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comments"/>
+ </rule>
+ <rule pattern="(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b">
+ <token type="NameBuiltin"/>
+ </rule>
@@ -0,0 +1,21 @@
+<lexer>
+ <config>
+ <name>plaintext</name>
+ <alias>text</alias>
+ <alias>plain</alias>
+ <alias>no-highlight</alias>
+ <filename>*.txt</filename>
+ <mime_type>text/plain</mime_type>
+ <priority>-1</priority>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern=".+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,105 @@
+<lexer>
+ <config>
+ <name>Plutus Core</name>
+ <alias>plutus-core</alias>
+ <alias>plc</alias>
+ <filename>*.plc</filename>
+ <mime_type>text/x-plutus-core</mime_type>
+ <mime_type>application/x-plutus-core</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(\(|\))">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(\[|\])">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="({|})">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="([+-]?\d+)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(#([a-fA-F0-9][a-fA-F0-9])+)">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(\(\))">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="(True|False)">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="(con |abs |iwrap |unwrap |lam |builtin |delay |force |error)">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(fun |all |ifix |lam |con )">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(type|fun )">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(program )(\S+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(unit|bool|integer|bytestring|string)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(addInteger |subtractInteger |multiplyInteger |divideInteger |quotientInteger |remainderInteger |modInteger |equalsInteger |lessThanInteger |lessThanEqualsInteger )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(appendByteString |consByteString |sliceByteString |lengthOfByteString |indexByteString |equalsByteString |lessThanByteString |lessThanEqualsByteString )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(sha2_256 |sha3_256 |blake2b_256 |verifySignature )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(appendString |equalsString |encodeUtf8 |decodeUtf8 )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(ifThenElse )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(chooseUnit )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(trace )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(fstPair |sndPair )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(chooseList |mkCons |headList |tailList |nullList )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(chooseData |constrData |mapData |listData |iData |bData |unConstrData |unMapData |unListData |unIData |unBData |equalsData )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(mkPairData |mkNilData |mkNilPairData )">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="([a-zA-Z][a-zA-Z0-9_']*)">
+ <token type="Name"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="[^\\"]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,135 @@
+<lexer>
+ <config>
+ <name>Pony</name>
+ <alias>pony</alias>
+ <filename>*.pony</filename>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[^\\"]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="nested_comment"/>
+ </rule>
+ <rule pattern=""""(?:.|\n)*?"""">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="\'.*\'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="=>|[]{}:().~;,|&!^?[]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(addressof|digestof|consume|isnt|and|not|as|is|or)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="!=|==|<<|>>|[-+/*%=<>]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(compile_intrinsic|compile_error|continue|recover|return|repeat|lambda|elseif|object|#share|match|#send|#read|ifdef|until|embed|while|where|error|break|with|else|#any|this|then|tag|for|trn|try|ref|use|var|val|let|end|iso|box|in|if|do)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(actor|class|struct|primitive|interface|trait|type)((?:\s)+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="typename"/>
+ </rule>
+ <rule pattern="(new|fun|be)((?:\s)+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="methodname"/>
+ </rule>
+ <rule pattern="(DisposableActor|NullablePointer|AsioEventNotify|UnsignedInteger|RuntimeOptions|DoNotOptimise|FloatingPoint|SignedInteger|ReadElement|ArrayValues|StringBytes|StringRunes|InputNotify|InputStream|AsioEventID|ByteSeqIter|AmbientAuth|Comparable|ArrayPairs|Stringable|OutStream|SourceLoc|ArrayKeys|StdStream|Equatable|AsioEvent|Iterator|Platform|Unsigned|Greater|Compare|Integer|Pointer|ReadSeq|ByteSeq|String|Number|Signed|Float|USize|Stdin|ILong|ISize|HasEq|Array|ULong|Equal|I128|U128|Bool|Less|Real|None|Seq|I64|Any|F32|F64|U64|U32|I32|Int|I16|U16|Env|I8|U8)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="_?[A-Z]\w*">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="string\(\)">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(true|false)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="_\d*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="_?[a-z][\w\'_]*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="typename">
+ <rule pattern="(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="methodname">
+ <rule pattern="(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="nested_comment">
+ <rule pattern="[^*/]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,155 @@
+<lexer>
+ <config>
+ <name>PostgreSQL SQL dialect</name>
+ <alias>postgresql</alias>
+ <alias>postgres</alias>
+ <mime_type>text/x-postgresql</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="--.*\n?">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comments"/>
+ </rule>
+ <rule pattern="(bigint|bigserial|bit|bit\s+varying|bool|boolean|box|bytea|char|character|character\s+varying|cidr|circle|date|decimal|double\s+precision|float4|float8|inet|int|int2|int4|int8|integer|interval|json|jsonb|line|lseg|macaddr|money|numeric|path|pg_lsn|point|polygon|real|serial|serial2|serial4|serial8|smallint|smallserial|text|time|timestamp|timestamptz|timetz|tsquery|tsvector|txid_snapshot|uuid|varbit|varchar|with\s+time\s+zone|without\s+time\s+zone|xml|anyarray|anyelement|anyenum|anynonarray|anyrange|cstring|fdw_handler|internal|language_handler|opaque|record|void)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?s)(DO)(\s+)(?:(LANGUAGE)?(\s+)('?)(\w+)?('?)(\s+))?(\$)([^$]*)(\$)(.*?)(\$)(\10)(\$)">
+ <usingbygroup>
+ <sublexer_name_group>6</sublexer_name_group>
+ <code_group>12</code_group>
+ <emitters>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="LiteralStringSingle"/>
+ <token type="LiteralStringSingle"/>
+ <token type="LiteralStringSingle"/>
+ <token type="Text"/>
+ <token type="LiteralStringHeredoc"/>
+ <token type="LiteralStringHeredoc"/>
+ <token type="LiteralStringHeredoc"/>
+ <token type="LiteralStringHeredoc"/>
+ <token type="LiteralStringHeredoc"/>
+ <token type="LiteralStringHeredoc"/>
+ <token type="LiteralStringHeredoc"/>
+ </emitters>
+ </usingbygroup>
+ </rule>
@@ -0,0 +1,89 @@
+<lexer>
+ <config>
+ <name>PostScript</name>
+ <alias>postscript</alias>
+ <alias>postscr</alias>
+ <filename>*.ps</filename>
+ <filename>*.eps</filename>
+ <mime_type>application/postscript</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^%!.+\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="%%.*\n">
+ <token type="CommentSpecial"/>
+ </rule>
+ <rule pattern="(^%.*\n){2,}">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="%.*\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="LiteralString"/>
+ <push state="stringliteral"/>
+ </rule>
+ <rule pattern="[{}<>\[\]]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="<[0-9A-Fa-f]+>(?=[()<>\[\]{}/%\s])">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?(?=[()<>\[\]{}/%\s])">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?(?=[()<>\[\]{}/%\s])">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\-|\+)?[0-9]+(?=[()<>\[\]{}/%\s])">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\/[^()<>\[\]{}/%\s]+(?=[()<>\[\]{}/%\s])">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[^()<>\[\]{}/%\s]+(?=[()<>\[\]{}/%\s])">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(false|true)(?=[()<>\[\]{}/%\s])">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)(?=[()<>\[\]{}/%\s])">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(dictstackoverflow|undefinedfilename|currentlinewidth|undefinedresult|currentmatrix|defaultmatrix|invertmatrix|concatmatrix|currentpoint|setlinewidth|syntaxerror|idtransform|identmatrix|setrgbcolor|stringwidth|setlinejoin|getinterval|itransform|strokepath|pathforall|rangecheck|setlinecap|dtransform|transform|translate|setmatrix|typecheck|undefined|scalefont|closepath|findfont|showpage|rcurveto|grestore|truncate|pathbbox|charpath|rlineto|rmoveto|ceiling|newpath|setdash|setfont|restore|curveto|setgray|stroke|pstack|matrix|length|lineto|repeat|rotate|moveto|shfill|concat|gsave|aload|scale|array|round|stack|index|begin|print|floor|exch|quit|clip|copy|bind|loop|idiv|fill|show|roll|exit|load|dict|save|arcn|sqrt|exec|rand|atan|end|div|abs|run|def|cvs|exp|cvi|sin|cos|get|dup|mod|put|sub|pop|add|neg|mul|arc|log|ln|gt)(?=[()<>\[\]{}/%\s])">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="stringliteral">
+ <rule pattern="[^()\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralStringEscape"/>
+ <push state="escape"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="LiteralString"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="escape">
+ <rule pattern="[0-8]{3}|n|r|t|b|f|\\|\(|\)">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,58 @@
+<lexer>
+ <config>
+ <name>POVRay</name>
+ <alias>pov</alias>
+ <filename>*.pov</filename>
+ <filename>*.inc</filename>
+ <mime_type>text/x-povray</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="/\*[\w\W]*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(?s)"(?:\\.|[^"\\])+"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="#(statistics|include|version|declare|default|warning|define|elseif|ifndef|switch|fclose|render|fopen|undef|error|debug|while|local|macro|range|ifdef|break|write|else|case|read|for|end|if)\b">
+ <token type="CommentPreproc"/>
+ </rule>
@@ -0,0 +1,51 @@
+<lexer>
+ <config>
+ <name>PowerQuery</name>
+ <alias>powerquery</alias>
+ <alias>pq</alias>
+ <filename>*.pq</filename>
+ <mime_type>text/x-powerquery</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(and|as|each|else|error|false|if|in|is|let|meta|not|null|or|otherwise|section|shared|then|true|try|type)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(#binary|#date|#datetime|#datetimezone|#duration|#infinity|#nan|#sections|#shared|#table|#time)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(([a-zA-Z]|_)[\w|._]*|#"[^"]+")">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="([0-9]+\.[0-9]+|\.[0-9]+)([eE][0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[\(\)\[\]\{\}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\.\.|\.\.\.|=>|<=|>=|<>|[@!?,;=<>\+\-\*\/&]">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,230 @@
+<lexer>
+ <config>
+ <name>PowerShell</name>
+ <alias>powershell</alias>
+ <alias>posh</alias>
+ <alias>ps1</alias>
+ <alias>psm1</alias>
+ <alias>psd1</alias>
+ <alias>pwsh</alias>
+ <filename>*.ps1</filename>
+ <filename>*.psm1</filename>
+ <filename>*.psd1</filename>
+ <mime_type>text/x-powershell</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="child"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(\s*)(#)(requires)(\s+)">
+ <bygroups>
+ <token type="TextWhitespace"/>
+ <token type="Comment"/>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ <push state="requires"/>
+ </rule>
+ <rule pattern="^(\s*#[#\s]*)(\.(?:component|description|example|externalhelp|forwardhelpcategory|forwardhelptargetname|functionality|inputs|link|notes|outputs|parameter|remotehelprunspace|role|synopsis))([^\n]*$)">
+ <bygroups>
+ <token type="Comment"/>
+ <token type="LiteralStringDoc"/>
+ <token type="Comment"/>
+ </bygroups>
+ </rule>
+ <rule pattern="#[^\n]*?$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(&lt;|<)#">
+ <token type="CommentMultiline"/>
+ <push state="multline"/>
+ </rule>
+ <rule pattern="(?i)([A-Z]:)">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="@"\n">
+ <token type="LiteralStringHeredoc"/>
+ <push state="heredoc-double"/>
+ </rule>
+ <rule pattern="@'\n.*?\n'@">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule pattern="@(?=\(|{)|\$(?=\()">
+ <token type="NameVariableMagic"/>
+ </rule>
+ <rule pattern="`[\'"$@-]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="'([^']|'')*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="(?<!\S)(function|filter|workflow)(\s*)(global:|script:|private:|env:)?(\w\S*\b)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="TextWhitespace"/>
+ <token type="NameVariableMagic"/>
+ <token type="NameBuiltin"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?<!\S)(class|configuration)(\s+)(\w\S*)(\s*)(:*)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="TextWhitespace"/>
+ <token type="NameBuiltin"/>
+ <token type="NameBuiltin"/>
+ <token type="NameBuiltin"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\$false|\$null|\$true(?=\b)">
+ <token type="NameVariableMagic"/>
+ </rule>
+ <rule pattern="(\$|@@|@)((global|script|private|env):)?\w+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(parameter|validatenotnullorempty|validatescript|validaterange|validateset|validaterange|validatepattern|validatelength|validatecount|validatenotnullorempty|validatescript|cmdletbinding|alias)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[a-z]\w*-[a-z]\w*\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(mandatory|parametersetname|position|helpmessage|valuefrompipeline|valuefrompipelinebypropertyname|valuefromremainingarguments|dontshow)\b">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(confirmimpact|defaultparametersetname|helpuri|supportspaging|supportsshouldprocess|positionalbinding)\b">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(while|until|trap|switch|return|ref|process|param|parameter|in|if|global:|foreach|for|finally|filter|end|elseif|else|dynamicparam|do|default|continue|break|begin|\?|%|#script|#private|#local|#global|try|catch|throw)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="-(and|as|band|bnot|bor|bxor|casesensitive|ccontains|ceq|cge|cgt|cle|clike|clt|cmatch|cne|cnotcontains|cnotlike|cnotmatch|contains|creplace|eq|exact|f|file|ge|gt|icontains|ieq|ige|igt|ile|ilike|ilt|imatch|ine|inotcontains|inotlike|inotmatch|ireplace|is|isnot|le|like|lt|match|ne|not|notcontains|notlike|notmatch|or|regex|replace|wildcard)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(ac|asnp|cat|cd|cfs|chdir|clc|clear|clhy|cli|clp|cls|clv|cnsn|compare|copy|cp|cpi|cpp|curl|cvpa|dbp|del|diff|dir|dnsn|ebp|echo|epal|epcsv|epsn|erase|etsn|exsn|fc|fhx|fl|foreach|ft|fw|gal|gbp|gc|gci|gcm|gcs|gdr|ghy|gi|gjb|gl|gm|gmo|gp|gps|gpv|group|gsn|gsnp|gsv|gu|gv|gwmi|h|history|icm|iex|ihy|ii|ipal|ipcsv|ipmo|ipsn|irm|ise|iwmi|iwr|kill|lp|ls|man|md|measure|mi|mount|move|mp|mv|nal|ndr|ni|nmo|npssc|nsn|nv|ogv|oh|popd|ps|pushd|pwd|r|rbp|rcjb|rcsn|rd|rdr|ren|ri|rjb|rm|rmdir|rmo|rni|rnp|rp|rsn|rsnp|rujb|rv|rvpa|rwmi|sajb|sal|saps|sasv|sbp|sc|select|set|shcm|si|sl|sleep|sls|sort|sp|spjb|spps|spsv|start|sujb|sv|swmi|tee|trcm|type|wget|where|wjb|write)\s">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(\[)([a-z_\[][\w. `,\[\]]*)(\])">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameConstant"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?<!\[)(?<=\S[^\*|\n]\.)\w+(?=\s+|\(|\{|\.)">
+ <token type="NameProperty"/>
+ </rule>
+ <rule pattern="(?<!\w)([-+]?(?:[0-9]+)?\.?[0-9]+(?:(?:e|E)[0-9]+)?(?:F|f|D|d|M|m)?)((?i:[kmgtp]b)?)\b">
+ <bygroups>
+ <token type="LiteralNumberFloat"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="-[a-z_]\w*:*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[.,;@{}\[\]$()=+*/\\&%!~?^\x60|<>-]|::">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="requires">
+ <rule pattern="\s*\n|\s*$">
+ <token type="TextWhitespace"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="-(?i:modules|pssnapin|runasadministrator|ahellid|version|assembly|psedition)">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="-\S*\b">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\s+(\S+)">
+ <token type="NameAttribute"/>
+ </rule>
+ </state>
+ <state name="child">
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="multline">
+ <rule pattern="[^#&.]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="#(>|&gt;)">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(\s*\.)(component|description|example|externalhelp|forwardhelpcategory|forwardhelptargetname|functionality|inputs|link|notes|outputs|parameter|remotehelprunspace|role|synopsis)(\s*$)">
+ <bygroups>
+ <token type="CommentMultiline"/>
+ <token type="LiteralStringDoc"/>
+ <token type="CommentMultiline"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[#&.]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="`[0abfnrtv'\"$`]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^$`"]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\$\(">
+ <token type="Punctuation"/>
+ <push state="child"/>
+ </rule>
+ <rule pattern="((\$)((global|script|private|env):)?\w+)|((\$){((global|script|private|env):)?\w+})">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="""">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[`$]">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="heredoc-double">
+ <rule pattern="\n"@">
+ <token type="LiteralStringHeredoc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\$\(">
+ <token type="Punctuation"/>
+ <push state="child"/>
+ </rule>
+ <rule pattern="((\$)((global|script|private|env):)?\w+)|((\$){((global|script|private|env):)?\w+})">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[^@\n]+"]">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,115 @@
+<lexer>
+ <config>
+ <name>Prolog</name>
+ <alias>prolog</alias>
+ <filename>*.ecl</filename>
+ <filename>*.prolog</filename>
+ <filename>*.pro</filename>
+ <filename>*.pl</filename>
+ <mime_type>text/x-prolog</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="nested-comment"/>
+ </rule>
+ <rule pattern="%.*">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="0\'.">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="0b[01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0o[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d\d?\'[a-zA-Z0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[\[\](){}|.,;!]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=":-|-->">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=""(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(?:''|[^'])*'">
+ <token type="LiteralStringAtom"/>
+ </rule>
+ <rule pattern="is\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(mod|div|not)\b">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="_">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="([a-z]+)(:)">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([a-zÀ---][\w$À---]*)(\s*)(:-|-->)">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([a-zÀ---][\w$À---]*)(\s*)(\()">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zÀ---][\w$À---]*">
+ <token type="LiteralStringAtom"/>
+ </rule>
+ <rule pattern="[#&*+\-./:<=>?@\\^~¡-¿‐-〿]+">
+ <token type="LiteralStringAtom"/>
+ </rule>
+ <rule pattern="[A-Z_]\w*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\s+|[ --�]">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="nested-comment">
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="[^*/]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="[*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,119 @@
+
+<lexer>
+ <config>
+ <name>Promela</name>
+ <alias>promela</alias>
+ <filename>*.pml</filename>
+ <filename>*.prom</filename>
+ <filename>*.prm</filename>
+ <filename>*.promela</filename>
+ <filename>*.pr</filename>
+ <filename>*.pm</filename>
+ <mime_type>text/x-promela</mime_type>
+ </config>
+ <rules>
+ <state name="statements">
+ <rule pattern="(\[\]|<>|/\\|\\/)|(U|W|V)\b"><token type="Operator"/></rule>
+ <rule pattern="@"><token type="Punctuation"/></rule>
+ <rule pattern="(\.)([a-zA-Z_]\w*)"><bygroups><token type="Operator"/><token type="NameAttribute"/></bygroups></rule>
+ <rule><include state="keywords"/></rule>
+ <rule><include state="types"/></rule>
+ <rule pattern="([LuU]|u8)?(")"><bygroups><token type="LiteralStringAffix"/><token type="LiteralString"/></bygroups><push state="string"/></rule>
+ <rule pattern="([LuU]|u8)?(')(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])(')"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringChar"/><token type="LiteralStringChar"/><token type="LiteralStringChar"/></bygroups></rule>
+ <rule pattern="0[xX]([0-9a-fA-F](\'?[0-9a-fA-F])*\.[0-9a-fA-F](\'?[0-9a-fA-F])*|\.[0-9a-fA-F](\'?[0-9a-fA-F])*|[0-9a-fA-F](\'?[0-9a-fA-F])*)[pP][+-]?[0-9a-fA-F](\'?[0-9a-fA-F])*[lL]?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="(-)?(\d(\'?\d)*\.\d(\'?\d)*|\.\d(\'?\d)*|\d(\'?\d)*)[eE][+-]?\d(\'?\d)*[fFlL]?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="(-)?((\d(\'?\d)*\.(\d(\'?\d)*)?|\.\d(\'?\d)*)[fFlL]?)|(\d(\'?\d)*[fFlL])"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="(-)?0[xX][0-9a-fA-F](\'?[0-9a-fA-F])*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberHex"/></rule>
+ <rule pattern="(-)?0[bB][01](\'?[01])*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberBin"/></rule>
+ <rule pattern="(-)?0(\'?[0-7])+(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberOct"/></rule>
+ <rule pattern="(-)?\d(\'?\d)*(([uU][lL]{0,2})|[lL]{1,2}[uU]?)?"><token type="LiteralNumberInteger"/></rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]"><token type="Operator"/></rule>
+ <rule pattern="[()\[\],.]"><token type="Punctuation"/></rule>
+ <rule pattern="(true|false|NULL)\b"><token type="NameBuiltin"/></rule>
+ <rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="Name"/></rule>
+ </state>
+ <state name="types">
+ <rule pattern="(bit|bool|byte|pid|short|int|unsigned)\b"><token type="KeywordType"/></rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(atomic|break|d_step|do|od|for|in|goto|if|fi|unless)\b"><token type="Keyword"/></rule>
+ <rule pattern="(assert|get_priority|printf|printm|set_priority)\b"><token type="NameFunction"/></rule>
+ <rule pattern="(c_code|c_decl|c_expr|c_state|c_track)\b"><token type="Keyword"/></rule>
+ <rule pattern="(_|_last|_nr_pr|_pid|_priority|else|np_|STDIN)\b"><token type="NameBuiltin"/></rule>
+ <rule pattern="(empty|enabled|eval|full|len|nempty|nfull|pc_value)\b"><token type="NameFunction"/></rule>
+ <rule pattern="run\b"><token type="OperatorWord"/></rule>
+ <rule pattern="(active|chan|D_proctype|hidden|init|local|mtype|never|notrace|proctype|show|trace|typedef|xr|xs)\b"><token type="KeywordDeclaration"/></rule>
+ <rule pattern="(priority|provided)\b"><token type="Keyword"/></rule>
+ <rule pattern="(inline|ltl|select)\b"><token type="KeywordDeclaration"/></rule>
+ <rule pattern="skip\b"><token type="Keyword"/></rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="^#if\s+0"><token type="CommentPreproc"/><push state="if0"/></rule>
+ <rule pattern="^#"><token type="CommentPreproc"/><push state="macro"/></rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#if\s+0)"><bygroups><usingself state="root"/><token type="CommentPreproc"/></bygroups><push state="if0"/></rule>
+ <rule pattern="^(\s*(?:/[*].*?[*]/\s*)?)(#)"><bygroups><usingself state="root"/><token type="CommentPreproc"/></bygroups><push state="macro"/></rule>
+ <rule pattern="(^[ \t]*)(?!(?:public|private|protected|default)\b)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+)(\s*)(:)(?!:)"><bygroups><token type="TextWhitespace"/><token type="NameLabel"/><token type="TextWhitespace"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="\n"><token type="TextWhitespace"/></rule>
+ <rule pattern="[^\S\n]+"><token type="TextWhitespace"/></rule>
+ <rule pattern="\\\n"><token type="Text"/></rule>
+ <rule pattern="//(?:.|(?<=\\)\n)*\n"><token type="CommentSingle"/></rule>
+ <rule pattern="/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/"><token type="CommentMultiline"/></rule>
+ <rule pattern="/(\\\n)?[*][\w\W]*"><token type="CommentMultiline"/></rule>
+ </state>
+ <state name="root">
+ <rule><include state="whitespace"/></rule>
+ <rule><include state="keywords"/></rule>
+ <rule pattern="((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+(?:[&*\s])+)(\s*(?:(?:(?://(?:.|(?<=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+)(\s*(?:(?:(?://(?:.|(?<=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)(\([^;"\')]*?\))(\s*(?:(?:(?://(?:.|(?<=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)([^;{/"\']*)(\{)"><bygroups><usingself state="root"/><usingself state="whitespace"/><token type="NameFunction"/><usingself state="whitespace"/><usingself state="root"/><usingself state="whitespace"/><usingself state="root"/><token type="Punctuation"/></bygroups><push state="function"/></rule>
+ <rule pattern="((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+(?:[&*\s])+)(\s*(?:(?:(?://(?:.|(?<=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)((?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|::)+)(\s*(?:(?:(?://(?:.|(?<=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)(\([^;"\')]*?\))(\s*(?:(?:(?://(?:.|(?<=\\)\n)*\n)|(?:/(?:\\\n)?[*](?:[^*]|[*](?!(?:\\\n)?/))*[*](?:\\\n)?/))\s*)*)([^;/"\']*)(;)"><bygroups><usingself state="root"/><usingself state="whitespace"/><token type="NameFunction"/><usingself state="whitespace"/><usingself state="root"/><usingself state="whitespace"/><usingself state="root"/><token type="Punctuation"/></bygroups></rule>
+ <rule><include state="types"/></rule>
+ <rule><push state="statement"/></rule>
+ </state>
+ <state name="statement">
+ <rule><include state="whitespace"/></rule>
+ <rule><include state="statements"/></rule>
+ <rule pattern="\}"><token type="Punctuation"/></rule>
+ <rule pattern="[{;]"><token type="Punctuation"/><pop depth="1"/></rule>
+ </state>
+ <state name="function">
+ <rule><include state="whitespace"/></rule>
+ <rule><include state="statements"/></rule>
+ <rule pattern=";"><token type="Punctuation"/></rule>
+ <rule pattern="\{"><token type="Punctuation"/><push/></rule>
+ <rule pattern="\}"><token type="Punctuation"/><pop depth="1"/></rule>
+ </state>
+ <state name="string">
+ <rule pattern="""><token type="LiteralString"/><pop depth="1"/></rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
+ <rule pattern="[^\\"\n]+"><token type="LiteralString"/></rule>
+ <rule pattern="\\\n"><token type="LiteralString"/></rule>
+ <rule pattern="\\"><token type="LiteralString"/></rule>
+ </state>
+ <state name="macro">
+ <rule pattern="(\s*(?:/[*].*?[*]/\s*)?)(include)(\s*(?:/[*].*?[*]/\s*)?)("[^"]+")([^\n]*)"><bygroups><usingself state="root"/><token type="CommentPreproc"/><usingself state="root"/><token type="CommentPreprocFile"/><token type="CommentSingle"/></bygroups></rule>
+ <rule pattern="(\s*(?:/[*].*?[*]/\s*)?)(include)(\s*(?:/[*].*?[*]/\s*)?)(<[^>]+>)([^\n]*)"><bygroups><usingself state="root"/><token type="CommentPreproc"/><usingself state="root"/><token type="CommentPreprocFile"/><token type="CommentSingle"/></bygroups></rule>
+ <rule pattern="[^/\n]+"><token type="CommentPreproc"/></rule>
+ <rule pattern="/[*](.|\n)*?[*]/"><token type="CommentMultiline"/></rule>
+ <rule pattern="//.*?\n"><token type="CommentSingle"/><pop depth="1"/></rule>
+ <rule pattern="/"><token type="CommentPreproc"/></rule>
+ <rule pattern="(?<=\\)\n"><token type="CommentPreproc"/></rule>
+ <rule pattern="\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
+ </state>
+ <state name="if0">
+ <rule pattern="^\s*#if.*?(?<!\\)\n"><token type="CommentPreproc"/><push/></rule>
+ <rule pattern="^\s*#el(?:se|if).*\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
+ <rule pattern="^\s*#endif.*?(?<!\\)\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
+ <rule pattern=".*?\n"><token type="Comment"/></rule>
+ </state>
+ <state name="classname">
+ <rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="NameClass"/><pop depth="1"/></rule>
+ <rule pattern="\s*(?=>)"><token type="Text"/><pop depth="1"/></rule>
+ <rule><pop depth="1"/></rule>
+ </state>
+ <state name="case-value">
+ <rule pattern="(?<!:)(:)(?!:)"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern="(?!\d)(?:[\w$]|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8})+"><token type="NameConstant"/></rule>
+ <rule><include state="whitespace"/></rule>
+ <rule><include state="statements"/></rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,123 @@
+<lexer>
+ <config>
+ <name>PromQL</name>
+ <alias>promql</alias>
+ <filename>*.promql</filename>
+ </config>
+ <rules>
+ <state name="range">
+ <rule pattern="\]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[1-9][0-9]*[smhdwy]">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="function">
+ <rule pattern="\)">
+ <token type="Operator"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Operator"/>
+ <push/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(group_right|group_left|ignoring|without|offset|bool|on|by)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(count_values|quantile|bottomk|stdvar|stddev|count|group|topk|sum|min|max|avg)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(histogram_quantile|quantile_over_time|absent_over_time|stdvar_over_time|stddev_over_time|count_over_time|predict_linear|label_replace|max_over_time|avg_over_time|sum_over_time|days_in_month|min_over_time|day_of_month|holt_winters|day_of_week|label_join|sort_desc|clamp_max|timestamp|clamp_min|increase|changes|resets|vector|absent|idelta|minute|scalar|log10|delta|month|floor|deriv|round|irate|rate|year|sort|log2|sqrt|ceil|time|hour|abs|exp|ln)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="[1-9][0-9]*[smhdwy]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="-?[0-9]+\.[0-9]+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="#.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(\+|\-|\*|\/|\%|\^)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="==|!=|>=|<=|<|>">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="and|or|unless">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="[_a-zA-Z][a-zA-Z0-9_]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(["\'])(.*?)(["\'])">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\(">
+ <token type="Operator"/>
+ <push state="function"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="labels"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="Punctuation"/>
+ <push state="range"/>
+ </rule>
+ </state>
+ <state name="labels">
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|!~)(\s*?)("|')(.*?)("|')">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="TextWhitespace"/>
+ <token type="Operator"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,45 @@
+<lexer>
+ <config>
+ <name>properties</name>
+ <alias>java-properties</alias>
+ <filename>*.properties</filename>
+ <mime_type>text/x-java-properties</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^([ \t\f]*)([#!].*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentSingle"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^([ \t\f]*)(\S+?)([ \t\f]*)([=:])([ \t\f]*)(.*(?:(?<=\\)\n.*)*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^([ \t\f]*)(\S+)([ \t\f]+)(.*(?:(?<=\\)\n.*)*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^([ \t\f]*)(\w+)$">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameAttribute"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,118 @@
+<lexer>
+ <config>
+ <name>Protocol Buffer</name>
+ <alias>protobuf</alias>
+ <alias>proto</alias>
+ <filename>*.proto</filename>
+ </config>
+ <rules>
+ <state name="package">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="message">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="type">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[,;{}\[\]()<>]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="/(\\\n)?/(\n|(.|\n)*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?\*(.|\n)*?\*(\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\b(extensions|required|repeated|optional|returns|default|option|packed|import|ctype|oneof|max|rpc|to)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(sfixed32|sfixed64|fixed32|fixed64|sint32|sint64|double|string|uint32|uint64|int32|float|int64|bytes|bool)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(package)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="package"/>
+ </rule>
+ <rule pattern="(message|extend)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="message"/>
+ </rule>
+ <rule pattern="(enum|group|service)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="type"/>
+ </rule>
+ <rule pattern="\".*?\"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\'.*?\'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\-?(inf|nan))\b">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+[LlUu]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[0-7]+[LlUu]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="\d+[LlUu]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[+-=]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="([a-zA-Z_][\w.]*)([ \t]*)(=)">
+ <bygroups>
+ <token type="Name"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_][\w.]*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,161 @@
+<lexer>
+ <config>
+ <name>PRQL</name>
+ <alias>prql</alias>
+ <filename>*.prql</filename>
+ <mime_type>application/prql</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="#!.*"><token type="LiteralStringDoc"/></rule>
+ <rule pattern="#.*"><token type="CommentSingle"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="^(\s*)(module)(\s*)"><bygroups><token type="TextWhitespace"/><token type="KeywordNamespace"/><token type="TextWhitespace"/></bygroups><push state="imports"/></rule>
+ <rule pattern="(bool|int|int8|int16|int32|int64|int128|float|text|set)\b"><token type="KeywordType"/></rule>
+ <rule pattern="^prql "><token type="KeywordReserved"/></rule>
+ <rule pattern="let"><token type="KeywordDeclaration"/></rule>
+ <rule><include state="keywords"/></rule>
+ <rule><include state="expr"/></rule>
+ <rule pattern="^[A-Za-z_][a-zA-Z0-9_]*"><token type="Keyword"/></rule>
+ </state>
+ <state name="expr">
+ <rule pattern="(f)(""")"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringDouble"/></bygroups><combined state="fstringescape" state="tdqf"/></rule>
+ <rule pattern="(f)(''')"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringSingle"/></bygroups><combined state="fstringescape" state="tsqf"/></rule>
+ <rule pattern="(f)(")"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringDouble"/></bygroups><combined state="fstringescape" state="dqf"/></rule>
+ <rule pattern="(f)(')"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringSingle"/></bygroups><combined state="fstringescape" state="sqf"/></rule>
+ <rule pattern="(s)(""")"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringDouble"/></bygroups><combined state="stringescape" state="tdqf"/></rule>
+ <rule pattern="(s)(''')"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringSingle"/></bygroups><combined state="stringescape" state="tsqf"/></rule>
+ <rule pattern="(s)(")"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringDouble"/></bygroups><combined state="stringescape" state="dqf"/></rule>
+ <rule pattern="(s)(')"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringSingle"/></bygroups><combined state="stringescape" state="sqf"/></rule>
+ <rule pattern="(?i)(r)(""")"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringDouble"/></bygroups><push state="tdqs"/></rule>
+ <rule pattern="(?i)(r)(''')"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringSingle"/></bygroups><push state="tsqs"/></rule>
+ <rule pattern="(?i)(r)(")"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringDouble"/></bygroups><push state="dqs"/></rule>
+ <rule pattern="(?i)(r)(')"><bygroups><token type="LiteralStringAffix"/><token type="LiteralStringSingle"/></bygroups><push state="sqs"/></rule>
+ <rule pattern="""""><token type="LiteralStringDouble"/><combined state="stringescape" state="tdqs"/></rule>
+ <rule pattern="'''"><token type="LiteralStringSingle"/><combined state="stringescape" state="tsqs"/></rule>
+ <rule pattern="""><token type="LiteralStringDouble"/><combined state="stringescape" state="dqs"/></rule>
+ <rule pattern="'"><token type="LiteralStringSingle"/><combined state="stringescape" state="sqs"/></rule>
+ <rule pattern="@\d{4}-\d{2}-\d{2}T\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?"><token type="LiteralDate"/></rule>
+ <rule pattern="@\d{4}-\d{2}-\d{2}"><token type="LiteralDate"/></rule>
+ <rule pattern="@\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?"><token type="LiteralDate"/></rule>
+ <rule pattern="[^\S\n]+"><token type="Text"/></rule>
+ <rule><include state="numbers"/></rule>
+ <rule pattern="->|=>|==|!=|>=|<=|~=|&&|\|\||\?\?|\/\/"><token type="Operator"/></rule>
+ <rule pattern="[-~+/*%=<>&^|.@]"><token type="Operator"/></rule>
+ <rule pattern="[]{}:(),;[]"><token type="Punctuation"/></rule>
+ <rule><include state="functions"/></rule>
+ <rule pattern="[A-Za-z_][a-zA-Z0-9_]*"><token type="NameVariable"/></rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)([eE][+-]?\d(?:_?\d)*)?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="0[oO](?:_?[0-7])+"><token type="LiteralNumberOct"/></rule>
+ <rule pattern="0[bB](?:_?[01])+"><token type="LiteralNumberBin"/></rule>
+ <rule pattern="0[xX](?:_?[a-fA-F0-9])+"><token type="LiteralNumberHex"/></rule>
+ <rule pattern="\d(?:_?\d)*"><token type="LiteralNumberInteger"/></rule>
+ </state>
+ <state name="fstringescape">
+ <rule><include state="stringescape"/></rule>
+ </state>
+ <state name="bytesescape">
+ <rule pattern="\\([\\bfnrt"\']|\n|x[a-fA-F0-9]{2}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
+ </state>
+ <state name="stringescape">
+ <rule pattern="\\(N\{.*?\}|u\{[a-fA-F0-9]{1,6}\})"><token type="LiteralStringEscape"/></rule>
+ <rule><include state="bytesescape"/></rule>
+ </state>
+ <state name="fstrings-single">
+ <rule pattern="\}"><token type="LiteralStringInterpol"/></rule>
+ <rule pattern="\{"><token type="LiteralStringInterpol"/><push state="expr-inside-fstring"/></rule>
+ <rule pattern="[^\\\'"{}\n]+"><token type="LiteralStringSingle"/></rule>
+ <rule pattern="[\'"\\]"><token type="LiteralStringSingle"/></rule>
+ </state>
+ <state name="fstrings-double">
+ <rule pattern="\}"><token type="LiteralStringInterpol"/></rule>
+ <rule pattern="\{"><token type="LiteralStringInterpol"/><push state="expr-inside-fstring"/></rule>
+ <rule pattern="[^\\\'"{}\n]+"><token type="LiteralStringDouble"/></rule>
+ <rule pattern="[\'"\\]"><token type="LiteralStringDouble"/></rule>
+ </state>
+ <state name="strings-single">
+ <rule pattern="\{((\w+)((\.\w+)|(\[[^\]]+\]))*)?(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}"><token type="LiteralStringInterpol"/></rule>
+ <rule pattern="[^\\\'"%{\n]+"><token type="LiteralStringSingle"/></rule>
+ <rule pattern="[\'"\\]"><token type="LiteralStringSingle"/></rule>
+ <rule pattern="%|(\{{1,2})"><token type="LiteralStringSingle"/></rule>
+ </state>
+ <state name="strings-double">
+ <rule pattern="\{((\w+)((\.\w+)|(\[[^\]]+\]))*)?(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?\}"><token type="LiteralStringInterpol"/></rule>
+ <rule pattern="[^\\\'"%{\n]+"><token type="LiteralStringDouble"/></rule>
+ <rule pattern="[\'"\\]"><token type="LiteralStringDouble"/></rule>
+ <rule pattern="%|(\{{1,2})"><token type="LiteralStringDouble"/></rule>
+ </state>
+ <state name="dqf">
+ <rule pattern="""><token type="LiteralStringDouble"/><pop depth="1"/></rule>
+ <rule pattern="\\\\|\\"|\\\n"><token type="LiteralStringEscape"/></rule>
+ <rule><include state="fstrings-double"/></rule>
+ </state>
+ <state name="sqf">
+ <rule pattern="'"><token type="LiteralStringSingle"/><pop depth="1"/></rule>
+ <rule pattern="\\\\|\\'|\\\n"><token type="LiteralStringEscape"/></rule>
+ <rule><include state="fstrings-single"/></rule>
+ </state>
+ <state name="dqs">
+ <rule pattern="""><token type="LiteralStringDouble"/><pop depth="1"/></rule>
+ <rule pattern="\\\\|\\"|\\\n"><token type="LiteralStringEscape"/></rule>
+ <rule><include state="strings-double"/></rule>
+ </state>
+ <state name="sqs">
+ <rule pattern="'"><token type="LiteralStringSingle"/><pop depth="1"/></rule>
+ <rule pattern="\\\\|\\'|\\\n"><token type="LiteralStringEscape"/></rule>
+ <rule><include state="strings-single"/></rule>
+ </state>
+ <state name="tdqf">
+ <rule pattern="""""><token type="LiteralStringDouble"/><pop depth="1"/></rule>
+ <rule><include state="fstrings-double"/></rule>
+ <rule pattern="\n"><token type="LiteralStringDouble"/></rule>
+ </state>
+ <state name="tsqf">
+ <rule pattern="'''"><token type="LiteralStringSingle"/><pop depth="1"/></rule>
+ <rule><include state="fstrings-single"/></rule>
+ <rule pattern="\n"><token type="LiteralStringSingle"/></rule>
+ </state>
+ <state name="tdqs">
+ <rule pattern="""""><token type="LiteralStringDouble"/><pop depth="1"/></rule>
+ <rule><include state="strings-double"/></rule>
+ <rule pattern="\n"><token type="LiteralStringDouble"/></rule>
+ </state>
+ <state name="tsqs">
+ <rule pattern="'''"><token type="LiteralStringSingle"/><pop depth="1"/></rule>
+ <rule><include state="strings-single"/></rule>
+ <rule pattern="\n"><token type="LiteralStringSingle"/></rule>
+ </state>
+ <state name="expr-inside-fstring">
+ <rule pattern="[{([]"><token type="Punctuation"/><push state="expr-inside-fstring-inner"/></rule>
+ <rule pattern="(=\s*)?\}"><token type="LiteralStringInterpol"/><pop depth="1"/></rule>
+ <rule pattern="(=\s*)?:"><token type="LiteralStringInterpol"/><pop depth="1"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule><include state="expr"/></rule>
+ </state>
+ <state name="expr-inside-fstring-inner">
+ <rule pattern="[{([]"><token type="Punctuation"/><push state="expr-inside-fstring-inner"/></rule>
+ <rule pattern="[])}]"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule><include state="expr"/></rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(into|case|type|module|internal)\b"><token type="Keyword"/></rule>
+ <rule pattern="(true|false|null)\b"><token type="KeywordConstant"/></rule>
+ </state>
+ <state name="functions">
+ <rule pattern="(min|max|sum|average|stddev|every|any|concat_array|count|lag|lead|first|last|rank|rank_dense|row_number|round|as|in|tuple_every|tuple_map|tuple_zip|_eq|_is_null|from_text|lower|upper|read_parquet|read_csv)\b"><token type="NameFunction"/></rule>
+ </state>
+ <state name="comment">
+ <rule pattern="-(?!\})"><token type="CommentMultiline"/></rule>
+ <rule pattern="\{-"><token type="CommentMultiline"/><push state="comment"/></rule>
+ <rule pattern="[^-}]"><token type="CommentMultiline"/></rule>
+ <rule pattern="-\}"><token type="CommentMultiline"/><pop depth="1"/></rule>
+ </state>
+ <state name="imports">
+ <rule pattern="\w+(\.\w+)*"><token type="NameClass"/><pop depth="1"/></rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,213 @@
+<lexer>
+ <config>
+ <name>PSL</name>
+ <alias>psl</alias>
+ <filename>*.psl</filename>
+ <filename>*.BATCH</filename>
+ <filename>*.TRIG</filename>
+ <filename>*.PROC</filename>
+ <mime_type>text/x-psl</mime_type>
+ </config>
+ <rules>
+ <!-- NameFunction|TypeName -->
+ <state name="root">
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\+|-|\*|\/|\b%\b|<|>|=|'|\band\b|\bor\b|_|:|!">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[{}(,)\[\]]">
+ <token type="Punctuation"/>
+ <push state="root"/>
+ </rule>
+ <rule pattern="#">
+ <token type="KeywordPseudo"/>
+ <push state="directive"/>
+ </rule>
+ <rule pattern="\.?\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="\b(do|set|if|else|for|while|quit|catch|return|ret|while)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="\btype\b">
+ <token type="KeywordDeclaration"/>
+ <push state="typename"/>
+ </rule>
+ <rule pattern="\b(public|req|private|void)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="\b(Boolean|String|Number|Date)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(\${0,2}[_a-zA-z]\w*)?(\^[_a-zA-Z]\w*)">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([_a-zA-z]\w*)(\.[_a-zA-Z]\w*)(\()">
+ <bygroups>
+ <token type="Name"/>
+ <token type="NameFunction"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\${0,2}[_a-zA-z]\w*)(\.[_a-zA-Z]\w*)">
+ <bygroups>
+ <token type="Name"/>
+ <token type="NameProperty"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\.?(%|\${0,2})[_a-zA-Z]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="typename">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\b(public|req|private|void)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="([_a-zA-Z]\w*)?(\s+)([_a-zA-Z]\w*)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="Name"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="directive">
+ <rule pattern="ACCEPT">
+ <token type="KeywordPseudo"/>
+ <push state="accept-directive"/>
+ </rule>
+ <rule pattern="CLASSDEF">
+ <token type="KeywordPseudo"/>
+ <push state="classdef-directive"/>
+ </rule>
+ <rule pattern="IF|ELSEIF">
+ <token type="KeywordPseudo"/>
+ <push state="if-directive"/>
+ </rule>
+ <rule pattern="PACKAGE">
+ <token type="KeywordPseudo"/>
+ <push state="package-directive"/>
+ </rule>
+ <rule pattern="PROPERTYDEF">
+ <token type="KeywordPseudo"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="INFO|WARN">
+ <token type="KeywordPseudo"/>
+ <push state="warn-directive"/>
+ </rule>
+ <rule pattern="OPTION">
+ <token type="KeywordPseudo"/>
+ <push state="option-directive"/>
+ </rule>
+ <rule pattern="BYPASS|ELSE|END|ENDBYPASS|ENDIF|OPTIMIZE">
+ <token type="KeywordPseudo"/>
+ <push state="other-directive"/>
+ </rule>
+ </state>
+ <state name="accept-directive">
+ <rule pattern=".+$">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="other-directive">
+ <rule pattern=".+$">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="classdef-directive">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="delimiter|extends">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="public">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[\w\d]+">
+ <token type="NameClass"/>
+ </rule>
+ </state>
+ <state name="if-directive">
+ <rule pattern=".+$">
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="option-directive">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="ON|OFF">
+ <token type="KeywordConstant"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\w\d]+">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="package-directive">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="Name"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,100 @@
+<lexer>
+ <config>
+ <name>Puppet</name>
+ <alias>puppet</alias>
+ <filename>*.pp</filename>
+ </config>
+ <rules>
+ <state name="strings">
+ <rule pattern=""([^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'(\\'|[^'])*'">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="names"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ <rule>
+ <include state="operators"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule pattern="[]{}:(),;[]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="comments">
+ <rule pattern="\s*#.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="operators">
+ <rule pattern="(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(in|and|or|not)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ </state>
+ <state name="names">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(\$\S+)(\[)(\S+)(\])">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Punctuation"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\$\S+">
+ <token type="NameVariable"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+[eE][+-]?[0-9]+j?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[0-7]+j?">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[xX][a-fA-F0-9]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d+L">
+ <token type="LiteralNumberIntegerLong"/>
+ </rule>
+ <rule pattern="\d+j?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="keywords">
@@ -0,0 +1,593 @@
+<lexer>
+ <config>
+ <name>Python</name>
+ <alias>python</alias>
+ <alias>py</alias>
+ <alias>sage</alias>
+ <alias>python3</alias>
+ <alias>py3</alias>
+ <filename>*.py</filename>
+ <filename>*.pyi</filename>
+ <filename>*.pyw</filename>
+ <filename>*.jy</filename>
+ <filename>*.sage</filename>
+ <filename>*.sc</filename>
+ <filename>SConstruct</filename>
+ <filename>SConscript</filename>
+ <filename>*.bzl</filename>
+ <filename>BUCK</filename>
+ <filename>BUILD</filename>
+ <filename>BUILD.bazel</filename>
+ <filename>WORKSPACE</filename>
+ <filename>WORKSPACE.bzlmod</filename>
+ <filename>WORKSPACE.bazel</filename>
+ <filename>MODULE.bazel</filename>
+ <filename>REPO.bazel</filename>
+ <filename>*.tac</filename>
+ <mime_type>text/x-python</mime_type>
+ <mime_type>application/x-python</mime_type>
+ <mime_type>text/x-python3</mime_type>
+ <mime_type>application/x-python3</mime_type>
+ </config>
+ <rules>
+ <state name="numbers">
+ <rule pattern="(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)([eE][+-]?\d(?:_?\d)*)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[oO](?:_?[0-7])+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[bB](?:_?[01])+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[xX](?:_?[a-fA-F0-9])+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d(?:_?\d)*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="expr">
+ <rule pattern="(?i)(rf|fr)(""")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <combined state="rfstringescape" state="tdqf"/>
+ </rule>
+ <rule pattern="(?i)(rf|fr)(''')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <combined state="rfstringescape" state="tsqf"/>
+ </rule>
+ <rule pattern="(?i)(rf|fr)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <combined state="rfstringescape" state="dqf"/>
+ </rule>
+ <rule pattern="(?i)(rf|fr)(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <combined state="rfstringescape" state="sqf"/>
+ </rule>
+ <rule pattern="([fF])(""")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <combined state="fstringescape" state="tdqf"/>
+ </rule>
+ <rule pattern="([fF])(''')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <combined state="fstringescape" state="tsqf"/>
+ </rule>
+ <rule pattern="([fF])(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <combined state="fstringescape" state="dqf"/>
+ </rule>
+ <rule pattern="([fF])(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <combined state="fstringescape" state="sqf"/>
+ </rule>
+ <rule pattern="(?i)(rb|br|r)(""")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <push state="tdqs"/>
+ </rule>
+ <rule pattern="(?i)(rb|br|r)(''')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <push state="tsqs"/>
+ </rule>
+ <rule pattern="(?i)(rb|br|r)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <push state="dqs"/>
+ </rule>
+ <rule pattern="(?i)(rb|br|r)(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <push state="sqs"/>
+ </rule>
+ <rule pattern="([uUbB]?)(""")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <combined state="stringescape" state="tdqs"/>
+ </rule>
+ <rule pattern="([uUbB]?)(''')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <combined state="stringescape" state="tsqs"/>
+ </rule>
+ <rule pattern="([uUbB]?)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <combined state="stringescape" state="dqs"/>
+ </rule>
+ <rule pattern="([uUbB]?)(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <combined state="stringescape" state="sqs"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ <rule pattern="!=|==|<<|>>|:=|[-~+/*%=<>&^|.]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[]{}:(),;[]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(in|is|and|or|not)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule>
+ <include state="expr-keywords"/>
+ </rule>
+ <rule>
+ <include state="builtins"/>
+ </rule>
+ <rule>
+ <include state="magicfuncs"/>
+ </rule>
+ <rule>
+ <include state="magicvars"/>
+ </rule>
+ <rule>
+ <include state="name"/>
+ </rule>
+ </state>
+ <state name="fstrings-double">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="expr-inside-fstring"/>
+ </rule>
+ <rule pattern="[^\\\'"{}\n]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[\'"\\]">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(yield from|nonlocal|continue|finally|except|lambda|assert|global|return|raise|yield|while|break|await|async|pass|else|elif|with|try|for|del|as|if|match|case)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(False|True|None)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ </state>
+ <state name="dqs">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\|\\"|\\\n">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="strings-double"/>
+ </rule>
+ </state>
+ <state name="fromimport">
+ <rule pattern="(\s+)(import)\b">
+ <bygroups>
+ <token type="Text"/>
+ <token type="KeywordNamespace"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="None\b">
+ <token type="NameBuiltinPseudo"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[_\p{L}][_\p{L}\p{N}]*">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="builtins">
+ <rule pattern="(?<!\.)(staticmethod|classmethod|memoryview|__import__|issubclass|isinstance|frozenset|bytearray|enumerate|reversed|property|compile|complex|delattr|hasattr|setattr|globals|getattr|divmod|filter|locals|format|object|sorted|slice|print|bytes|range|input|tuple|round|super|float|eval|list|dict|repr|type|vars|hash|next|bool|open|iter|oct|pow|min|zip|max|map|bin|len|set|any|dir|all|abs|str|sum|chr|int|hex|ord|id)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<!\.)(self|Ellipsis|NotImplemented|cls)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
@@ -0,0 +1,356 @@
+<lexer>
+ <config>
+ <name>Python 2</name>
+ <alias>python2</alias>
+ <alias>py2</alias>
+ <mime_type>text/x-python2</mime_type>
+ <mime_type>application/x-python2</mime_type>
+ </config>
+ <rules>
+ <state name="tdqs">
+ <rule pattern=""""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings-double"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="name">
+ <rule pattern="@[\w.]+">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="magicfuncs">
+ <rule pattern="(__instancecheck__|__subclasscheck__|__getattribute__|__rfloordiv__|__ifloordiv__|__setslice__|__getslice__|__contains__|__reversed__|__floordiv__|__rtruediv__|__itruediv__|__delslice__|__rlshift__|__rrshift__|__delitem__|__rdivmod__|__nonzero__|__missing__|__delattr__|__setattr__|__irshift__|__complex__|__setitem__|__getitem__|__truediv__|__unicode__|__ilshift__|__getattr__|__delete__|__coerce__|__invert__|__lshift__|__divmod__|__rshift__|__enter__|__index__|__float__|__iadd__|__rsub__|__init__|__imul__|__rpow__|__repr__|__rmul__|__isub__|__iter__|__rmod__|__ixor__|__call__|__imod__|__long__|__hash__|__rxor__|__idiv__|__iand__|__rdiv__|__ipow__|__rcmp__|__rand__|__exit__|__radd__|__str__|__cmp__|__pos__|__pow__|__oct__|__new__|__neg__|__mul__|__mod__|__set__|__xor__|__sub__|__len__|__and__|__get__|__rop__|__add__|__ior__|__div__|__iop__|__int__|__abs__|__hex__|__ror__|__del__|__eq__|__or__|__ne__|__lt__|__le__|__ge__|__gt__|__op__)\b">
+ <token type="NameFunctionMagic"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(yield from|continue|finally|lambda|assert|global|except|return|print|yield|while|break|raise|elif|pass|exec|else|with|try|for|del|as|if)\b">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="tsqs">
+ <rule pattern="'''">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings-single"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="stringescape">
+ <rule pattern="\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+[eE][+-]?[0-9]+j?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[0-7]+j?">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[bB][01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[xX][a-fA-F0-9]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="\d+L">
+ <token type="LiteralNumberIntegerLong"/>
+ </rule>
+ <rule pattern="\d+j?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="(?:[ \t]|\\\n)+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="as\b">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[a-zA-Z_][\w.]*">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="magicvars">
+ <rule pattern="(__metaclass__|__defaults__|__globals__|__closure__|__weakref__|__module__|__slots__|__class__|__bases__|__file__|__func__|__dict__|__name__|__self__|__code__|__mro__|__doc__)\b">
+ <token type="NameVariableMagic"/>
+ </rule>
+ </state>
+ <state name="fromimport">
+ <rule pattern="(?:[ \t]|\\\n)+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="import\b">
+ <token type="KeywordNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="None\b">
+ <token type="NameBuiltinPseudo"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[a-zA-Z_.][\w.]*">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="strings-single">
+ <rule pattern="%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="[^\\\'"%\n]+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="[\'"\\]">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="%">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="funcname">
+ <rule>
+ <include state="magicfuncs"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="classname">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="backtick">
+ <rule pattern="`.*?`">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ </state>
+ <state name="strings-double">
+ <rule pattern="%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?[hlL]?[E-GXc-giorsux%]">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="[^\\\'"%\n]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[\'"\\]">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="%">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="dqs">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\|\\"|\\\n">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="strings-double"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDoc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDoc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\A#!.+$">
+ <token type="CommentHashbang"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="[]{}:(),;[]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(in|is|and|or|not)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="!=|==|<<|>>|[-~+/*%=<>&^|.]">
+ <token type="Operator"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule pattern="(def)((?:\s|\\\s)+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="(class)((?:\s|\\\s)+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="(from)((?:\s|\\\s)+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="fromimport"/>
+ </rule>
+ <rule pattern="(import)((?:\s|\\\s)+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule>
+ <include state="builtins"/>
+ </rule>
+ <rule>
+ <include state="magicfuncs"/>
+ </rule>
+ <rule>
+ <include state="magicvars"/>
+ </rule>
+ <rule>
+ <include state="backtick"/>
+ </rule>
+ <rule pattern="([rR]|[uUbB][rR]|[rR][uUbB])(""")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <push state="tdqs"/>
+ </rule>
+ <rule pattern="([rR]|[uUbB][rR]|[rR][uUbB])(''')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <push state="tsqs"/>
+ </rule>
+ <rule pattern="([rR]|[uUbB][rR]|[rR][uUbB])(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <push state="dqs"/>
+ </rule>
+ <rule pattern="([rR]|[uUbB][rR]|[rR][uUbB])(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <push state="sqs"/>
+ </rule>
+ <rule pattern="([uUbB]?)(""")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <combined state="stringescape" state="tdqs"/>
+ </rule>
+ <rule pattern="([uUbB]?)(''')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <combined state="stringescape" state="tsqs"/>
+ </rule>
+ <rule pattern="([uUbB]?)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringDouble"/>
+ </bygroups>
+ <combined state="stringescape" state="dqs"/>
+ </rule>
+ <rule pattern="([uUbB]?)(')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringSingle"/>
+ </bygroups>
+ <combined state="stringescape" state="sqs"/>
+ </rule>
+ <rule>
+ <include state="name"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ </state>
+ <state name="sqs">
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\|\\'|\\\n">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="strings-single"/>
+ </rule>
+ </state>
+ <state name="builtins">
+ <rule pattern="(?<!\.)(staticmethod|classmethod|__import__|isinstance|basestring|issubclass|frozenset|raw_input|bytearray|enumerate|property|callable|reversed|execfile|hasattr|setattr|compile|complex|delattr|unicode|globals|getattr|unichr|reduce|xrange|buffer|intern|filter|locals|divmod|coerce|sorted|reload|object|slice|round|float|super|input|bytes|apply|tuple|range|iter|dict|long|type|hash|vars|next|file|exit|open|repr|eval|bool|list|bin|pow|zip|ord|oct|min|set|any|max|map|all|len|sum|int|dir|hex|chr|abs|cmp|str|id)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="(?<!\.)(PendingDeprecationWarning|UnicodeTranslateError|NotImplementedError|UnicodeDecodeError|DeprecationWarning|UnicodeEncodeError|FloatingPointError|ZeroDivisionError|UnboundLocalError|KeyboardInterrupt|EnvironmentError|IndentationError|OverflowWarning|ArithmeticError|ReferenceError|AttributeError|AssertionError|RuntimeWarning|UnicodeWarning|GeneratorExit|SyntaxWarning|StandardError|BaseException|OverflowError|FutureWarning|ImportWarning|StopIteration|UnicodeError|WindowsError|RuntimeError|ImportError|UserWarning|LookupError|SyntaxError|SystemError|MemoryError|SystemExit|ValueError|IndexError|NameError|Exception|TypeError|EOFError|KeyError|VMSError|TabError|IOError|Warning|OSError)\b">
+ <token type="NameException"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,173 @@
+<lexer>
+ <config>
+ <name>QBasic</name>
+ <alias>qbasic</alias>
+ <alias>basic</alias>
+ <filename>*.BAS</filename>
+ <filename>*.bas</filename>
+ <mime_type>text/basic</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\n+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="^(\s*)(\d*)(\s*)(REM .*)$">
+ <bygroups>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ <token type="TextWhitespace"/>
+ <token type="CommentSingle"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)(\d+)(\s*)">
+ <bygroups>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?=[\s]*)(\w+)(?=[\s]*=)">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="(?=[^"]*)\'.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern=""[^\n"]*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="(END)(\s+)(FUNCTION|IF|SELECT|SUB)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="TextWhitespace"/>
+ <token type="KeywordReserved"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="TextWhitespace"/>
+ <token type="NameVariable"/>
+ <token type="TextWhitespace"/>
+ <token type="Name"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(DIM)(\s+)(SHARED)(\s+)([^\s(]+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="TextWhitespace"/>
+ <token type="NameVariable"/>
+ <token type="TextWhitespace"/>
+ <token type="NameVariableGlobal"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(DIM)(\s+)([^\s(]+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="TextWhitespace"/>
+ <token type="NameVariableGlobal"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)([a-zA-Z_]+)(\s*)(\=)">
+ <bygroups>
+ <token type="TextWhitespace"/>
+ <token type="NameVariableGlobal"/>
+ <token type="TextWhitespace"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(GOTO|GOSUB)(\s+)(\w+\:?)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(SUB)(\s+)(\w+\:?)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="declarations"/>
+ </rule>
+ <rule>
+ <include state="functions"/>
+ </rule>
+ <rule>
+ <include state="metacommands"/>
+ </rule>
+ <rule>
+ <include state="operators"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*[$@#&!]">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*\:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="\-?\d*\.\d+[@|#]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\-?\d+[@|#]">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\-?\d+#?">
+ <token type="LiteralNumberIntegerLong"/>
+ </rule>
+ <rule pattern="\-?\d+#?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[\[\]{}(),;]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[\w]+">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ </state>
+ <state name="declarations">
+ <rule pattern="\b(DATA|LET)(?=\(|\b)">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ </state>
+ <state name="functions">
+ <rule pattern="\b(ABS|ASC|ATN|CDBL|CHR\$|CINT|CLNG|COMMAND\$|COS|CSNG|CSRLIN|CVD|CVDMBF|CVI|CVL|CVS|CVSMBF|DATE\$|ENVIRON\$|EOF|ERDEV|ERDEV\$|ERL|ERR|EXP|FILEATTR|FIX|FRE|FREEFILE|HEX\$|INKEY\$|INP|INPUT\$|INSTR|INT|IOCTL\$|LBOUND|LCASE\$|LEFT\$|LEN|LOC|LOF|LOG|LPOS|LTRIM\$|MID\$|MKD\$|MKDMBF\$|MKI\$|MKL\$|MKS\$|MKSMBF\$|OCT\$|PEEK|PEN|PLAY|PMAP|POINT|POS|RIGHT\$|RND|RTRIM\$|SADD|SCREEN|SEEK|SETMEM|SGN|SIN|SPACE\$|SPC|SQR|STICK|STR\$|STRIG|STRING\$|TAB|TAN|TIME\$|TIMER|UBOUND|UCASE\$|VAL|VARPTR|VARPTR\$|VARSEG)(?=\(|\b)">
+ <token type="KeywordReserved"/>
+ </rule>
+ </state>
+ <state name="metacommands">
+ <rule pattern="\b(\$DYNAMIC|\$INCLUDE|\$STATIC)(?=\(|\b)">
+ <token type="KeywordConstant"/>
+ </rule>
+ </state>
+ <state name="operators">
+ <rule pattern="\b(AND|EQV|IMP|NOT|OR|XOR)(?=\(|\b)">
+ <token type="OperatorWord"/>
+ </rule>
+ </state>
+ <state name="statements">
+ <rule pattern="\b(BEEP|BLOAD|BSAVE|CALL|CALL\ ABSOLUTE|CALL\ INTERRUPT|CALLS|CHAIN|CHDIR|CIRCLE|CLEAR|CLOSE|CLS|COLOR|COM|COMMON|CONST|DATA|DATE\$|DECLARE|DEF\ FN|DEF\ SEG|DEFDBL|DEFINT|DEFLNG|DEFSNG|DEFSTR|DEF|DIM|DO|LOOP|DRAW|END|ENVIRON|ERASE|ERROR|EXIT|FIELD|FILES|FOR|NEXT|FUNCTION|GET|GOSUB|GOTO|IF|THEN|INPUT|INPUT\ \#|IOCTL|KEY|KEY|KILL|LET|LINE|LINE\ INPUT|LINE\ INPUT\ \#|LOCATE|LOCK|UNLOCK|LPRINT|LSET|MID\$|MKDIR|NAME|ON\ COM|ON\ ERROR|ON\ KEY|ON\ PEN|ON\ PLAY|ON\ STRIG|ON\ TIMER|ON\ UEVENT|ON|OPEN|OPEN\ COM|OPTION\ BASE|OUT|PAINT|PALETTE|PCOPY|PEN|PLAY|POKE|PRESET|PRINT|PRINT\ \#|PRINT\ USING|PSET|PUT|PUT|RANDOMIZE|READ|REDIM|REM|RESET|RESTORE|RESUME|RETURN|RMDIR|RSET|RUN|SCREEN|SEEK|SELECT\ CASE|SHARED|SHELL|SLEEP|SOUND|STATIC|STOP|STRIG|SUB|SWAP|SYSTEM|TIME\$|TIMER|TROFF|TRON|TYPE|UEVENT|UNLOCK|VIEW|WAIT|WHILE|WEND|WIDTH|WINDOW|WRITE)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="\b(ACCESS|ALIAS|ANY|APPEND|AS|BASE|BINARY|BYVAL|CASE|CDECL|DOUBLE|ELSE|ELSEIF|ENDIF|INTEGER|IS|LIST|LOCAL|LONG|LOOP|MOD|NEXT|OFF|ON|OUTPUT|RANDOM|SIGNAL|SINGLE|STEP|STRING|THEN|TO|UNTIL|USING|WEND)\b">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,113 @@
+<lexer>
+ <config>
+ <name>QML</name>
+ <alias>qml</alias>
+ <alias>qbs</alias>
+ <filename>*.qml</filename>
+ <filename>*.qbs</filename>
+ <mime_type>application/x-qml</mime_type>
+ <mime_type>application/x-qt.qbs+qml</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^(?=\s|/|<!--)">
+ <token type="Text"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?">
+ <token type="Operator"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\bid\s*:\s*[A-Za-z][\w.]*">
+ <token type="KeywordDeclaration"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="\b[A-Za-z][\w.]*\s*:">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|this)\b">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(var|let|with|function)\b">
+ <token type="KeywordDeclaration"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false|null|NaN|Infinity|undefined)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[$a-zA-Z_]\w*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="<!--">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="slashstartsregex">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=/)">
+ <token type="Text"/>
+ <push state="#pop" state="badregex"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="badregex">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,128 @@
+<lexer>
+ <config>
+ <name>R</name>
+ <alias>splus</alias>
+ <alias>s</alias>
+ <alias>r</alias>
+ <filename>*.S</filename>
+ <filename>*.R</filename>
+ <filename>*.r</filename>
+ <filename>.Rhistory</filename>
+ <filename>.Rprofile</filename>
+ <filename>.Renviron</filename>
+ <mime_type>text/S-plus</mime_type>
+ <mime_type>text/S</mime_type>
+ <mime_type>text/x-r-source</mime_type>
+ <mime_type>text/x-r</mime_type>
+ <mime_type>text/x-R</mime_type>
+ <mime_type>text/x-r-history</mime_type>
+ <mime_type>text/x-r-profile</mime_type>
+ <priority>0.1</priority> <!-- higher priority than Rebol -->
+ </config>
+ <rules>
+ <state name="numbers">
+ <rule pattern="0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?">
+ <token type="LiteralNumber"/>
+ </rule>
+ </state>
+ <state name="operators">
+ <rule pattern="<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule pattern="((?:`[^`\\]*(?:\\.[^`\\]*)*`)|(?:(?:[a-zA-z]|[_.][^0-9])[\w_.]*))\s*(?=\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule>
+ <include state="statements"/>
+ </rule>
+ <rule pattern="\{|\}">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="valid_name">
+ <rule pattern="(?:`[^`\\]*(?:\\.[^`\\]*)*`)|(?:(?:[a-zA-z]|[_.][^0-9])[\w_.]*)">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(if|else|for|while|repeat|in|next|break|return|switch|function)(?![\w.])">
+ <token type="KeywordReserved"/>
+ </rule>
+ </state>
+ <state name="builtin_symbols">
+ <rule pattern="(NULL|NA(_(integer|real|complex|character)_)?|letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))(?![\w.])">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(T|F)\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ </state>
+ <state name="string_squote">
+ <rule pattern="([^\'\\]|\\.)*\'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="comments">
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="punctuation">
+ <rule pattern="\[{1,2}|\]{1,2}|\(|\)|;|,">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="statements">
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\'">
+ <token type="LiteralString"/>
+ <push state="string_squote"/>
+ </rule>
+ <rule pattern="\"">
+ <token type="LiteralString"/>
+ <push state="string_dquote"/>
+ </rule>
+ <rule>
+ <include state="builtin_symbols"/>
+ </rule>
+ <rule>
+ <include state="valid_name"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ <rule>
+ <include state="punctuation"/>
+ </rule>
+ <rule>
+ <include state="operators"/>
+ </rule>
+ </state>
+ <state name="string_dquote">
+ <rule pattern="([^"\\]|\\.)*"">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,260 @@
+<lexer>
+ <config>
+ <name>Racket</name>
+ <alias>racket</alias>
+ <alias>rkt</alias>
+ <filename>*.rkt</filename>
+ <filename>*.rktd</filename>
+ <filename>*.rktl</filename>
+ <mime_type>text/x-racket</mime_type>
+ <mime_type>application/x-racket</mime_type>
+ </config>
+ <rules>
+ <state name="datum*">
+ <rule pattern="`|,@?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(?:\|[^|]*\||\\[\w\W]|[^|\\()[\]{}",\'`;\s]+)+">
+ <token type="LiteralStringSymbol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[|\\]">
+ <token type="Error"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="quoted-list">
+ <rule>
+ <include state="list"/>
+ </rule>
+ <rule pattern="(?!\Z)">
+ <token type="Text"/>
+ <push state="quoted-datum"/>
+ </rule>
+ </state>
+ <state name="quasiquoted-list">
+ <rule>
+ <include state="list"/>
+ </rule>
+ <rule pattern="(?!\Z)">
+ <token type="Text"/>
+ <push state="quasiquoted-datum"/>
+ </rule>
+ </state>
+ <state name="quoted-datum">
+ <rule>
+ <include state="datum"/>
+ </rule>
+ <rule pattern="[([{]">
+ <token type="Punctuation"/>
+ <push state="#pop" state="quoted-list"/>
+ </rule>
+ <rule>
+ <include state="datum*"/>
+ </rule>
+ </state>
+ <state name="block-comment">
+ <rule pattern="#\|">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\|#">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^#|]+|.">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="datum">
+ <rule pattern="(?s)#;|#*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern=";[^\n\r
]*">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="#\|">
+ <token type="CommentMultiline"/>
+ <push state="block-comment"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(?i)(?:#e)?(?:#d)?(?:#e)?[-+]?\d+(?=[()[\]{}",\'`;\s])">
+ <token type="LiteralNumberInteger"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?i)(?:#e)?(?:#d)?(?:#e)?[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[()[\]{}",\'`;\s])">
+ <token type="LiteralNumberFloat"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?i)(?:#e)?(?:#d)?(?:#e)?[-+]?((?:(?:\d+(?:/\d+|\.\d*)?|\.\d+)(?:[defls][-+]?\d+)?)([-+](?:(?:\d+(?:/\d+|\.\d*)?|\.\d+)(?:[defls][-+]?\d+)?)?i)?|[-+](?:(?:\d+(?:/\d+|\.\d*)?|\.\d+)(?:[defls][-+]?\d+)?)?i)(?=[()[\]{}",\'`;\s])">
+ <token type="LiteralNumber"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?i)(#d)?((?:[-+]?(?:(?:(?:\d+(?:/\d+|\.\d*)?|\.\d+)|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|\d+(?:\.\d*#+|/\d+#+)))(?:[defls][-+]?\d+)?)|[-+](?:(?:inf|nan)\.[0f]))([-+](?:(?:(?:(?:\d+(?:/\d+|\.\d*)?|\.\d+)|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|\d+(?:\.\d*#+|/\d+#+)))(?:[defls][-+]?\d+)?)|(?:(?:inf|nan)\.[0f]))?i)?|[-+](?:(?:(?:(?:\d+(?:/\d+|\.\d*)?|\.\d+)|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|\d+(?:\.\d*#+|/\d+#+)))(?:[defls][-+]?\d+)?)|(?:(?:inf|nan)\.[0f]))?i|(?:[-+]?(?:(?:(?:\d+(?:/\d+|\.\d*)?|\.\d+)|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|\d+(?:\.\d*#+|/\d+#+)))(?:[defls][-+]?\d+)?)|[-+](?:(?:inf|nan)\.[0f]))@(?:[-+]?(?:(?:(?:\d+(?:/\d+|\.\d*)?|\.\d+)|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|\d+(?:\.\d*#+|/\d+#+)))(?:[defls][-+]?\d+)?)|[-+](?:(?:inf|nan)\.[0f])))(?=[()[\]{}",\'`;\s])">
+ <token type="LiteralNumberFloat"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?i)(([-+]?(?:(?:\d+(?:/\d+|\.\d*)?|\.\d+)|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|\d+(?:\.\d*#+|/\d+#+)))t[-+]?\d+)|[-+](inf|nan)\.t)(?=[()[\]{}",\'`;\s])">
+ <token type="LiteralNumberFloat"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?i)(#[ei])?#b(?:\|[^|]*\||\\[\w\W]|[^|\\()[\]{}",\'`;\s]+)+">
+ <token type="LiteralNumberBin"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?i)(#[ei])?#o(?:\|[^|]*\||\\[\w\W]|[^|\\()[\]{}",\'`;\s]+)+">
+ <token type="LiteralNumberOct"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?i)(#[ei])?#x(?:\|[^|]*\||\\[\w\W]|[^|\\()[\]{}",\'`;\s]+)+">
+ <token type="LiteralNumberHex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?i)(#d)?#i(?:\|[^|]*\||\\[\w\W]|[^|\\()[\]{}",\'`;\s]+)+">
+ <token type="LiteralNumberFloat"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="#?"">
+ <token type="LiteralStringDouble"/>
+ <push state="#pop" state="string"/>
+ </rule>
+ <rule pattern="#<<(.+)\n(^(?!\1$).*$\n)*^\1$">
+ <token type="LiteralStringHeredoc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})">
+ <token type="LiteralStringChar"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?is)#\\([0-7]{3}|[a-z]+|.)">
+ <token type="LiteralStringChar"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?s)#[pr]x#?"(\\?.)*?"">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="#(true|false|[tTfF])">
+ <token type="NameConstant"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="#:(?:\|[^|]*\||\\[\w\W]|[^|\\()[\]{}",\'`;\s]+)+">
+ <token type="KeywordDeclaration"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(#lang |#!)(\S+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="#reader">
+ <token type="KeywordNamespace"/>
+ <push state="quoted-datum"/>
+ </rule>
+ <rule pattern="(?i)\.(?=[()[\]{}",\'`;\s])|#c[is]|#['`]|#,@?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="'|#[s&]|#hash(eqv?)?|#\d*(?=[([{])">
+ <token type="Operator"/>
+ <push state="#pop" state="quoted-datum"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8}|.)">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[)\]}]">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="(?!\Z)">
+ <token type="Text"/>
+ <push state="unquoted-datum"/>
+ </rule>
+ </state>
+ <state name="list">
+ <rule pattern="[)\]}]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="unquoted-datum">
+ <rule>
+ <include state="datum"/>
+ </rule>
+ <rule pattern="quote(?=[()[\]{}",\'`;\s])">
+ <token type="Keyword"/>
+ <push state="#pop" state="quoted-datum"/>
+ </rule>
+ <rule pattern="`">
+ <token type="Operator"/>
+ <push state="#pop" state="quasiquoted-datum"/>
+ </rule>
+ <rule pattern="quasiquote(?=[()[\]{}",\'`;\s])">
+ <token type="Keyword"/>
+ <push state="#pop" state="quasiquoted-datum"/>
+ </rule>
+ <rule pattern="[([{]">
+ <token type="Punctuation"/>
+ <push state="#pop" state="unquoted-list"/>
+ </rule>
@@ -0,0 +1,149 @@
+<lexer>
+ <config>
+ <name>Ragel</name>
+ <alias>ragel</alias>
+ </config>
+ <rules>
+ <state name="host">
+ <rule pattern="([^{}\'"/#]+|[^\\]\\[{}]|"(\\\\|\\"|[^"])*"|'(\\\\|\\'|[^'])*'|//.*$\n?|/\*(.|\n)*?\*/|\#.*$\n?|/(?!\*)(\\\\|\\/|[^/])*/|/)+">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="0x[0-9A-Fa-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[+-]?[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="literals">
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\[(\\\\|\\\]|[^\]])*\]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="/(?!\*)(\\\\|\\/|[^/])*/">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(access|action|alphtype)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(getkey|write|machine|include)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(any|ascii|extend|alpha|digit|alnum|lower|upper)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="identifiers">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameVariable"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="literals"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ <rule>
+ <include state="identifiers"/>
+ </rule>
+ <rule>
+ <include state="operators"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <push state="host"/>
+ </rule>
+ <rule pattern="=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="comments">
+ <rule pattern="\#.*$">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="operators">
+ <rule pattern=",">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\||&|--?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\.|<:|:>>?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="->">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(>|\$|%|<|@|<>)(/|eof\b)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(>|\$|%|<|@|<>)(!|err\b)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(>|\$|%|<|@|<>)(\^|lerr\b)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(>|\$|%|<|@|<>)(~|to\b)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(>|\$|%|<|@|<>)(\*|from\b)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=">|@|\$|%">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\*|\?|\+|\{[0-9]*,[0-9]*\}">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="!|\^">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\(|\)">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,236 @@
+<lexer>
+ <config>
+ <name>react</name>
+ <alias>jsx</alias>
+ <alias>react</alias>
+ <filename>*.jsx</filename>
+ <filename>*.react</filename>
+ <mime_type>text/jsx</mime_type>
+ <mime_type>text/typescript-jsx</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="<!--">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="slashstartsregex">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gimuy]+\b|\B)">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=/)">
+ <token type="Text"/>
+ <push state="#pop" state="badregex"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="tag">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="([\w]+\s*)(=)(\s*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="attr"/>
+ </rule>
+ <rule pattern="[{}]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[\w\.]+">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(/?)(\s*)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="expression">
+ <rule pattern="{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="attr">
+ <rule pattern="{">
+ <token type="Punctuation"/>
+ <push state="expression"/>
+ </rule>
+ <rule pattern="".*?"">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="'.*?'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="interp-inside">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="badregex">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="jsx">
+ <rule pattern="(<)(/?)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Punctuation"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<)([\w\.]+)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameTag"/>
+ </bygroups>
+ <push state="tag"/>
+ </rule>
+ <rule pattern="(<)(/)([\w\.]+)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Punctuation"/>
+ <token type="NameTag"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="jsx"/>
+ </rule>
+ <rule pattern="\A#! ?/.*?\n">
+ <token type="CommentHashbang"/>
+ </rule>
+ <rule pattern="^(?=\s|/|<!--)">
+ <token type="Text"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="(\.\d+|[0-9]+\.[0-9]*)([eE][-+]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[bB][01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[oO][0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\.\.\.|=>">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?">
+ <token type="Operator"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|this|of)\b">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(var|let|with|function)\b">
+ <token type="KeywordDeclaration"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(abstract|async|await|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false|null|NaN|Infinity|undefined)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|Promise|Proxy|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|document|this|window)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?:[$_\p{L}\p{N}]|\\u[a-fA-F0-9]{4})(?:(?:[$\p{L}\p{N}]|\\u[a-fA-F0-9]{4}))*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <push state="interp"/>
+ </rule>
+ </state>
+ <state name="interp">
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\\`">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interp-inside"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="[^`\\$]+">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,147 @@
+<lexer>
+ <config>
+ <name>ReasonML</name>
+ <alias>reason</alias>
+ <alias>reasonml</alias>
+ <filename>*.re</filename>
+ <filename>*.rei</filename>
+ <mime_type>text/x-reasonml</mime_type>
+ </config>
+ <rules>
+ <state name="escape-sequence">
+ <rule pattern="\\[\\"\'ntbr]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\[0-9]{3}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\x[0-9a-fA-F]{2}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="false|true|\(\)|\[\]">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="\b([A-Z][\w\']*)(?=\s*\.)">
+ <token type="NameNamespace"/>
+ <push state="dotted"/>
+ </rule>
+ <rule pattern="\b([A-Z][\w\']*)">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="\/\*(?![\/])">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="\b(as|assert|begin|class|constraint|do|done|downto|else|end|exception|external|false|for|fun|esfun|function|functor|if|in|include|inherit|initializer|lazy|let|switch|module|pub|mutable|new|nonrec|object|of|open|pri|rec|sig|struct|then|to|true|try|type|val|virtual|when|while|with)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(~|\}|\|]|\||\|\||\{<|\{|`|_|]|\[\||\[>|\[<|\[|\?\?|\?|>\}|>]|>|=|<-|<|;;|;|:>|:=|::|:|\.\.\.|\.\.|\.|=>|-\.|-|,|\+|\*|\)|\(|&&|&|#|!=)">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="([=<>@^|&+\*/$%-]|[!?~])?[!$%&*+\./:<=>?@^|~-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b(and|asr|land|lor|lsl|lsr|lxor|mod|or)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="\b(unit|int|float|bool|string|char|list|array)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="[^\W\d][\w']*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[xX][\da-fA-F][\da-fA-F_]*">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[oO][0-7][0-7_]*">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[bB][01][01_]*">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="\d[\d_]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="'.'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="'">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="[~?][a-z][\w\']*:">
+ <token type="NameVariable"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^\/*]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\/\*">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\*\/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\*]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="[^\\"]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule>
+ <include state="escape-sequence"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="dotted">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[A-Z][\w\']*(?=\s*\.)">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="[A-Z][\w\']*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[a-z_][\w\']*">
+ <token type="Name"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,68 @@
+<lexer>
+ <config>
+ <name>reg</name>
+ <alias>registry</alias>
+ <filename>*.reg</filename>
+ <mime_type>text/x-windows-registry</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="Windows Registry Editor.*">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[;#].*">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Operator"/>
+ <token type="NameBuiltin"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="value"/>
+ </rule>
+ <rule pattern="(.*?)([ \t]*)(=)([ \t]*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="value"/>
+ </rule>
+ </state>
+ <state name="value">
+ <rule pattern="-">
+ <token type="Operator"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Punctuation"/>
+ <token type="LiteralNumber"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".+">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,94 @@
+<lexer>
+ <config>
+ <name>Rego</name>
+ <alias>rego</alias>
+ <filename>*.rego</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(package|import|as|not|with|default|else|some|in|if|contains)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <!-- importing keywords should then show up as keywords -->
+ <rule pattern="(import)( future.keywords.)(\w+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ <token type="KeywordDeclaration"/>
+ </bygroups>
+ </rule>
+ <rule pattern="#[^\r\n]*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(FIXME|TODO|XXX)\b( .*)$">
+ <bygroups>
+ <token type="Error"/>
+ <token type="CommentSpecial"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+\.\d*([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\.\d+([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+[Ee][-+]\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\.\d+([eE][+\-]?\d+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(0|[1-9][0-9]*)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="""".*?"""">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\$/((?!/\$).)*/\$">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="/(\\\\|\\"|[^/])*/">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="^(\w+)">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[a-z_-][\w-]*(?=\()">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="[\r\n\s]+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="(package|import)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[=<>!+-/*&|]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=":=">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[[\]{}():;]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[$a-zA-Z_]\w*">
+ <token type="NameOther"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,127 @@
+<lexer>
+ <config>
+ <name>Rexx</name>
+ <alias>rexx</alias>
+ <alias>arexx</alias>
+ <filename>*.rexx</filename>
+ <filename>*.rex</filename>
+ <filename>*.rx</filename>
+ <filename>*.arexx</filename>
+ <mime_type>text/x-rexx</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="keyword">
+ <rule pattern="(address|arg|by|call|do|drop|else|end|exit|for|forever|if|interpret|iterate|leave|nop|numeric|off|on|options|parse|pull|push|queue|return|say|select|signal|to|then|trace|until|while)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ </state>
+ <state name="operator">
+ <rule pattern="(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|¬>>|¬>|¬|\.|,)">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ <state name="string_double">
+ <rule pattern="[^"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="""">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string_single">
+ <rule pattern="[^\'\n]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\'\'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^*]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\*">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string_double"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="string_single"/>
+ </rule>
+ <rule pattern="[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="TextWhitespace"/>
+ <token type="Operator"/>
+ <token type="TextWhitespace"/>
+ <token type="KeywordDeclaration"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([a-z_]\w*)(\s*)(:)">
+ <bygroups>
+ <token type="NameLabel"/>
+ <token type="TextWhitespace"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="function"/>
+ </rule>
+ <rule>
+ <include state="keyword"/>
+ </rule>
+ <rule>
+ <include state="operator"/>
+ </rule>
+ <rule pattern="[a-z_]\w*">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="function">
+ <rule pattern="(sourceline|wordlength|errortext|translate|wordindex|condition|datatype|subword|lineout|lastpos|delword|address|charout|wordpos|compare|overlay|reverse|symbol|stream|charin|center|delstr|verify|digits|abbrev|bitxor|format|random|insert|bitand|queued|length|linein|substr|copies|xrange|space|words|lines|bitor|trunc|strip|right|value|chars|trace|sign|form|fuzz|word|left|time|date|c2d|d2c|d2x|c2x|pos|b2x|arg|abs|min|x2b|x2c|x2d|max)(\s*)(\()">
+ <bygroups>
+ <token type="NameBuiltin"/>
+ <token type="TextWhitespace"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,58 @@
+
+<lexer>
+ <config>
+ <name>RPMSpec</name>
+ <alias>spec</alias>
+ <filename>*.spec</filename>
+ <mime_type>text/x-rpm-spec</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="#.*$"><token type="Comment"/></rule>
+ <rule><include state="basic"/></rule>
+ </state>
+ <state name="description">
+ <rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups><pop depth="1"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="."><token type="Text"/></rule>
+ </state>
+ <state name="changelog">
+ <rule pattern="\*.*$"><token type="GenericSubheading"/></rule>
+ <rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups><pop depth="1"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="."><token type="Text"/></rule>
+ </state>
+ <state name="string">
+ <rule pattern="""><token type="LiteralStringDouble"/><pop depth="1"/></rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
+ <rule><include state="interpol"/></rule>
+ <rule pattern="."><token type="LiteralStringDouble"/></rule>
+ </state>
+ <state name="basic">
+ <rule><include state="macro"/></rule>
+ <rule pattern="(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$"><bygroups><token type="GenericHeading"/><token type="Punctuation"/><usingself state="root"/></bygroups></rule>
+ <rule pattern="^%description"><token type="NameDecorator"/><push state="description"/></rule>
+ <rule pattern="^%changelog"><token type="NameDecorator"/><push state="changelog"/></rule>
+ <rule pattern="^(%(?:package|prep|build|install|clean|check|pre[a-z]*|post[a-z]*|trigger[a-z]*|files))(.*)$"><bygroups><token type="NameDecorator"/><token type="Text"/></bygroups></rule>
+ <rule pattern="%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)"><token type="Keyword"/></rule>
+ <rule><include state="interpol"/></rule>
+ <rule pattern="'.*?'"><token type="LiteralStringSingle"/></rule>
+ <rule pattern="""><token type="LiteralStringDouble"/><push state="string"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="."><token type="Text"/></rule>
+ </state>
+ <state name="macro">
+ <rule pattern="%define.*$"><token type="CommentPreproc"/></rule>
+ <rule pattern="%\{\!\?.*%define.*\}"><token type="CommentPreproc"/></rule>
+ <rule pattern="(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$"><bygroups><token type="CommentPreproc"/><token type="Text"/></bygroups></rule>
+ </state>
+ <state name="interpol">
+ <rule pattern="%\{?__[a-z_]+\}?"><token type="NameFunction"/></rule>
+ <rule pattern="%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?"><token type="KeywordPseudo"/></rule>
+ <rule pattern="%\{\?\w+\}"><token type="NameVariable"/></rule>
+ <rule pattern="\$\{?RPM_[A-Z0-9_]+\}?"><token type="NameVariableGlobal"/></rule>
+ <rule pattern="%\{[a-zA-Z]\w+\}"><token type="KeywordConstant"/></rule>
+ </state>
+ </rules>
+</lexer>
+
@@ -0,0 +1,724 @@
+<lexer>
+ <config>
+ <name>Ruby</name>
+ <alias>rb</alias>
+ <alias>ruby</alias>
+ <alias>duby</alias>
+ <filename>*.rb</filename>
+ <filename>*.rbw</filename>
+ <filename>Rakefile</filename>
+ <filename>*.rake</filename>
+ <filename>*.gemspec</filename>
+ <filename>*.rbx</filename>
+ <filename>*.duby</filename>
+ <filename>Gemfile</filename>
+ <filename>Vagrantfile</filename>
+ <mime_type>text/x-ruby</mime_type>
+ <mime_type>application/x-ruby</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="simple-sym">
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[^\\"#]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringSymbol"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="interpolated-regex">
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="strings">
+ <rule pattern="\:@{0,2}[a-zA-Z_]\w*[!?]?">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="\:@{0,2}(===|\[\]=|<=>|\*\*|==|>=|\+@|<>|>>|<<|-@|\[\]|~|`|\^|\||&|<|%|/|>|\+|-|\*)">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern=":'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern=":"">
+ <token type="LiteralStringSymbol"/>
+ <push state="simple-sym"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*)(:)(?!:)">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="simple-string"/>
+ </rule>
+ <rule pattern="(?<!\.)`">
+ <token type="LiteralStringBacktick"/>
+ <push state="simple-backtick"/>
+ </rule>
+ <rule pattern="%[QWx]?\{">
+ <token type="LiteralStringOther"/>
+ <push state="cb-intp-string"/>
+ </rule>
+ <rule pattern="%[qsw]\{">
+ <token type="LiteralStringOther"/>
+ <push state="cb-string"/>
+ </rule>
+ <rule pattern="%r\{">
+ <token type="LiteralStringRegex"/>
+ <push state="cb-regex"/>
+ </rule>
+ <rule pattern="%[QWx]?\[">
+ <token type="LiteralStringOther"/>
+ <push state="sb-intp-string"/>
+ </rule>
+ <rule pattern="%[qsw]\[">
+ <token type="LiteralStringOther"/>
+ <push state="sb-string"/>
+ </rule>
+ <rule pattern="%r\[">
+ <token type="LiteralStringRegex"/>
+ <push state="sb-regex"/>
+ </rule>
+ <rule pattern="%[QWx]?\(">
+ <token type="LiteralStringOther"/>
+ <push state="pa-intp-string"/>
+ </rule>
+ <rule pattern="%[qsw]\(">
+ <token type="LiteralStringOther"/>
+ <push state="pa-string"/>
+ </rule>
+ <rule pattern="%r\(">
+ <token type="LiteralStringRegex"/>
+ <push state="pa-regex"/>
+ </rule>
+ <rule pattern="%[QWx]?<">
+ <token type="LiteralStringOther"/>
+ <push state="ab-intp-string"/>
+ </rule>
+ <rule pattern="%[qsw]<">
+ <token type="LiteralStringOther"/>
+ <push state="ab-string"/>
+ </rule>
+ <rule pattern="%r<">
+ <token type="LiteralStringRegex"/>
+ <push state="ab-regex"/>
+ </rule>
+ <rule pattern="(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringOther"/>
+ <token type="None"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringOther"/>
+ <token type="None"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="simple-backtick">
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[^\\`#]+">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="ab-regex">
+ <rule pattern="\\[\\<>]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="<">
+ <token type="LiteralStringRegex"/>
+ <push/>
+ </rule>
+ <rule pattern=">[mixounse]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#<>]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#<>]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="cb-regex">
+ <rule pattern="\\[\\{}]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="LiteralStringRegex"/>
+ <push/>
+ </rule>
+ <rule pattern="\}[mixounse]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#{}]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#{}]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="end-part">
+ <rule pattern=".+">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string-intp">
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="in-intp"/>
+ </rule>
+ <rule pattern="#@@?[a-zA-Z_]\w*">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="#\$[a-zA-Z_]\w*">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ </state>
+ <state name="interpolated-string">
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="classname">
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="defexpr"/>
+ </rule>
+ <rule pattern="<<">
+ <token type="Operator"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[A-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="cb-intp-string">
+ <rule pattern="\\[\\{}]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[\\#{}]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#{}]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\A#!.+?$">
+ <token type="CommentHashbang"/>
+ </rule>
+ <rule pattern="#.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="=begin\s.*?\n=end.*?$">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="(defined\?|return|ensure|rescue|unless|undef|until|break|begin|elsif|super|alias|while|retry|BEGIN|raise|yield|redo|next|case|when|then|else|end|for|END|do|if|in)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(module)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(def)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="def(?=[*%&^`~+-/\[<>=])">
+ <token type="Keyword"/>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="(class)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="(module_function|attr_accessor|attr_reader|attr_writer|initialize|protected|include|private|extend|public|raise|false|catch|throw|attr|loop|true|new|nil)\b">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="(not|and|or)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="(protected_method_defined|private_method_defined|public_method_defined|method_defined|const_defined|block_given|instance_of|respond_to|iterator|autoload|kind_of|tainted|include|frozen|equal|is_a|nil|eql)\?">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(chomp|chop|exit|gsub|sub)!">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?<!\.)(protected_instance_methods|private_instance_methods|public_instance_methods|instance_variable_set|instance_variable_get|private_class_method|public_class_method|instance_variables|protected_methods|singleton_methods|included_modules|instance_methods|global_variables|private_methods|local_variables|instance_method|class_variables|public_methods|const_defined\?|set_trace_func|method_missing|const_missing|instance_eval|module_eval|untrace_var|class_eval|trace_var|const_get|readlines|ancestors|constants|const_set|object_id|readline|autoload|__send__|untaint|methods|display|Integer|sprintf|inspect|require|syscall|at_exit|binding|extend|printf|lambda|__id__|String|callcc|method|select|format|system|freeze|caller|raise|Float|print|throw|taint|clone|srand|Array|abort|split|catch|chomp|sleep|open|puts|putc|fork|fail|trap|exit|scan|getc|self|send|eval|gets|exec|gsub|proc|load|loop|chop|warn|hash|test|name|to_a|rand|to_s|sub|dup|id|p)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="__(FILE|LINE)__\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="(?<!\w)(<<-?)(["`\']?)([a-zA-Z_]\w*)(\2)(.*?\n)">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(<<-?)("|\')()(\2)(.*?\n)">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="__END__">
+ <token type="CommentPreproc"/>
+ <push state="end-part"/>
+ </rule>
+ <rule pattern="(?:^|(?<=[=<>~!:])|(?<=(?:\s|;)when\s)|(?<=(?:\s|;)or\s)|(?<=(?:\s|;)and\s)|(?<=\.index\s)|(?<=\.scan\s)|(?<=\.sub\s)|(?<=\.sub!\s)|(?<=\.gsub\s)|(?<=\.gsub!\s)|(?<=\.match\s)|(?<=(?:\s|;)if\s)|(?<=(?:\s|;)elsif\s)|(?<=^when\s)|(?<=^index\s)|(?<=^scan\s)|(?<=^sub\s)|(?<=^gsub\s)|(?<=^sub!\s)|(?<=^gsub!\s)|(?<=^match\s)|(?<=^if\s)|(?<=^elsif\s))(\s*)(/)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringRegex"/>
+ </bygroups>
+ <push state="multiline-regex"/>
+ </rule>
+ <rule pattern="(?<=\(|,|\[)/">
+ <token type="LiteralStringRegex"/>
+ <push state="multiline-regex"/>
+ </rule>
+ <rule pattern="(\s+)(/)(?![\s=])">
+ <bygroups>
+ <token type="Text"/>
+ <token type="LiteralStringRegex"/>
+ </bygroups>
+ <push state="multiline-regex"/>
+ </rule>
+ <rule pattern="(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberOct"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberHex"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(0b[01]+(?:_[01]+)*)(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberBin"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([\d]+(?:[_e]\d+)*)(\s*)([/?])?">
+ <bygroups>
+ <token type="LiteralNumberInteger"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="@@[a-zA-Z_]\w*">
+ <token type="NameVariableClass"/>
+ </rule>
+ <rule pattern="@[a-zA-Z_]\w*">
+ <token type="NameVariableInstance"/>
+ </rule>
+ <rule pattern="\$\w+">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="\$[!@&`\'+~=/\\,;.<>_*$?:"^-]">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="\$-[0adFiIlpvw]">
+ <token type="NameVariableGlobal"/>
+ </rule>
+ <rule pattern="::">
+ <token type="Operator"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule pattern="\?(\\[MC]-)*(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)(?!\w)">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="[A-Z]\w+">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="(\.|::)(===|\[\]=|<=>|\*\*|==|>=|\+@|<>|>>|<<|-@|\[\]|~|`|\^|\||&|<|%|/|>|\+|-|\*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameOperator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Name"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*[!?]?">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="(\[|\]|\*\*|<<?|>>?|>=|<=|<=>|=~|={3}|!~|&&?|\|\||\.{1,3})">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[-+/*%=<>&!^|~]=?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[(){};,/?:\\]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="defexpr">
+ <rule pattern="(\))(\.|::)?">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Operator"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Operator"/>
+ <push/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="in-intp">
+ <rule pattern="\{">
+ <token type="LiteralStringInterpol"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="multiline-regex">
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="\\/">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\/#]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="/[mixounse]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="cb-string">
+ <rule pattern="\\[\\{}]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\\#{}]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#{}]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="funcname">
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="defexpr"/>
+ </rule>
+ <rule pattern="(?:([a-zA-Z_]\w*)(\.))?([a-zA-Z_]\w*[!?]?|\*\*?|[-+]@?|[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Operator"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="sb-intp-string">
+ <rule pattern="\\[\\\[\]]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\]">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[\\#\[\]]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#\[\]]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="pa-string">
+ <rule pattern="\\[\\()]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\\#()]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#()]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="string-intp-escaped">
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ <state name="simple-string">
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[^\\"#]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[\\#]">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="pa-intp-string">
+ <rule pattern="\\[\\()]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[\\#()]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#()]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="ab-string">
+ <rule pattern="\\[\\<>]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="<">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern=">">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\\#<>]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#<>]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="sb-regex">
+ <rule pattern="\\[\\\[\]]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="LiteralStringRegex"/>
+ <push/>
+ </rule>
+ <rule pattern="\][mixounse]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#\[\]]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#\[\]]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="pa-regex">
+ <rule pattern="\\[\\()]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="LiteralStringRegex"/>
+ <push/>
+ </rule>
+ <rule pattern="\)[mixounse]*">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp"/>
+ </rule>
+ <rule pattern="[\\#()]">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern="[^\\#()]+">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ </state>
+ <state name="sb-string">
+ <rule pattern="\\[\\\[\]]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern="\]">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[\\#\[\]]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#\[\]]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ <state name="ab-intp-string">
+ <rule pattern="\\[\\<>]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="<">
+ <token type="LiteralStringOther"/>
+ <push/>
+ </rule>
+ <rule pattern=">">
+ <token type="LiteralStringOther"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="string-intp-escaped"/>
+ </rule>
+ <rule pattern="[\\#<>]">
+ <token type="LiteralStringOther"/>
+ </rule>
+ <rule pattern="[^\\#<>]+">
+ <token type="LiteralStringOther"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,375 @@
+<lexer>
+ <config>
+ <name>Rust</name>
+ <alias>rust</alias>
+ <alias>rs</alias>
+ <filename>*.rs</filename>
+ <filename>*.rs.in</filename>
+ <mime_type>text/rust</mime_type>
+ <mime_type>text/x-rust</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="modname">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="lifetime">
+ <rule pattern="[a-zA-Z_]+\w*">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="number_lit">
+ <rule pattern="[ui](8|16|32|64|size)">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="f(32|64)">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="attribute_common">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="CommentPreproc"/>
+ <push state="attribute["/>
+ </rule>
+ </state>
+ <state name="bytestring">
+ <rule pattern="\\x[89a-fA-F][0-9a-fA-F]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule>
+ <include state="string"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^*/]+">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="doccomment">
+ <rule pattern="[^*/]+">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="LiteralStringDoc"/>
+ <push/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="LiteralStringDoc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[*/]">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ </state>
+ <state name="funcname">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="formatted_string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\['"\\nrt]|\\(?=\n)|\\x[0-7][0-9a-fA-F]|\\0|\\u\{[0-9a-fA-F]{1,6}\}|\{\{|\}\}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\{[^}]*\}">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="[^\\"\{\}]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\['"\\nrt]|\\(?=\n)|\\x[0-7][0-9a-fA-F]|\\0|\\u\{[0-9a-fA-F]{1,6}\}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="#![^[\r\n].*$">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule>
+ <push state="base"/>
+ </rule>
+ </state>
+ <state name="attribute[">
+ <rule>
+ <include state="attribute_common"/>
+ </rule>
+ <rule pattern="\]">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^"\]\[]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ </state>
+ <state name="base">
+ <rule pattern="\n">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="//!.*?\n">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ <rule pattern="///(\n|[^/].*?\n)">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ <rule pattern="//(.*?)\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*\*(\n|[^/*])">
+ <token type="LiteralStringDoc"/>
+ <push state="doccomment"/>
+ </rule>
+ <rule pattern="/\*!">
+ <token type="LiteralStringDoc"/>
+ <push state="doccomment"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="\$([a-zA-Z_]\w*|\(,?|\),?|,?)">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(unsafe|static|extern|return|const|crate|where|while|await|trait|super|async|match|impl|else|move|loop|pub|ref|mut|for|dyn|use|box|in|if|as)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(abstract|override|unsized|virtual|become|typeof|final|macro|yield|priv|try|do)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="self\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="mod\b">
+ <token type="Keyword"/>
+ <push state="modname"/>
+ </rule>
+ <rule pattern="let\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="fn\b">
+ <token type="Keyword"/>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="(struct|enum|type|union)\b">
+ <token type="Keyword"/>
+ <push state="typename"/>
+ </rule>
+ <rule pattern="(default)(\s+)(type|fn)\b">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(isize|usize|bool|char|u128|i128|i64|i32|i16|str|u64|u32|f32|f64|u16|i8|u8)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="[sS]elf\b">
+ <token type="NameBuiltinPseudo"/>
+ </rule>
+ <rule pattern="(DoubleEndedIterator|ExactSizeIterator|IntoIterator|PartialOrd|PartialEq|ToString|Iterator|ToOwned|Default|Result|String|FnOnce|Extend|Option|FnMut|Unpin|Sized|AsRef|AsMut|Clone|None|From|Into|Sync|drop|Send|Drop|Copy|Some|Ord|Err|Box|Vec|Eq|Ok|Fn)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="::\b">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(?::|->)">
+ <token type="Text"/>
+ <push state="typename"/>
+ </rule>
+ <rule pattern="(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="NameLabel"/>
+ </bygroups>
+ </rule>
+ <rule pattern="'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0|\\u\{[0-9a-fA-F]{1,6}\}|.)'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="(b)('(?:\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0|.)')">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralStringChar"/>
+ </bygroups>
+ </rule>
+ <rule pattern="0b[01_]+">
+ <token type="LiteralNumberBin"/>
+ <push state="number_lit"/>
+ </rule>
+ <rule pattern="0o[0-7_]+">
+ <token type="LiteralNumberOct"/>
+ <push state="number_lit"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F_]+">
+ <token type="LiteralNumberHex"/>
+ <push state="number_lit"/>
+ </rule>
+ <rule pattern="[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)">
+ <token type="LiteralNumberFloat"/>
+ <push state="number_lit"/>
+ </rule>
+ <rule pattern="[0-9][0-9_]*">
+ <token type="LiteralNumberInteger"/>
+ <push state="number_lit"/>
+ </rule>
+ <rule pattern="(b)(")">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="bytestring"/>
+ </rule>
+ <rule pattern="(?s)(b?r)(#*)(".*?"\2)">
+ <bygroups>
+ <token type="LiteralStringAffix"/>
+ <token type="LiteralString"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="'(static|_)">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="'">
+ <token type="NameAttribute"/>
+ <push state="lifetime"/>
+ </rule>
+ <rule pattern="\.\.=?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[{}()\[\],.;]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[+\-*/%&|<>^!~@=:?]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\b(r#)?_?([A-Z][A-Z0-9_]*){2,}\b">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="((?:e?print(?:ln)?|format(?:_args)?|panic|todo|un(?:reachable|implemented))!)(\s*)(\()(\s*)(")">
+ <bygroups>
+ <token type="NameFunctionMagic"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ <push state="formatted_string"/>
+ </rule>
+ <rule pattern="([a-zA-Z_]\w*!)(\s*)(\(|\[|\{)">
+ <bygroups>
+ <token type="NameFunctionMagic"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(r#)?[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="r#[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="#!?\[">
+ <token type="CommentPreproc"/>
+ <push state="attribute["/>
+ </rule>
+ <rule pattern="#">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="typename">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="&">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern="'(static|_)">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="'">
+ <token type="NameAttribute"/>
+ <push state="lifetime"/>
+ </rule>
+ <rule pattern="(DoubleEndedIterator|ExactSizeIterator|IntoIterator|PartialOrd|PartialEq|ToString|Iterator|ToOwned|Default|Result|String|FnOnce|Extend|Option|FnMut|Unpin|Sized|AsRef|AsMut|Clone|None|From|Into|Sync|drop|Send|Drop|Copy|Some|Ord|Err|Box|Vec|Eq|Ok|Fn)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(isize|usize|bool|char|u128|i128|i64|i32|i16|str|u64|u32|f32|f64|u16|i8|u8)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,191 @@
+<lexer>
+ <config>
+ <name>SAS</name>
+ <alias>sas</alias>
+ <filename>*.SAS</filename>
+ <filename>*.sas</filename>
+ <mime_type>text/x-sas</mime_type>
+ <mime_type>text/sas</mime_type>
+ <mime_type>application/x-sas</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="validvar">
+ <rule pattern="[a-z_]\w{0,31}\.?">
+ <token type="NameVariable"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="cards-datalines">
+ <rule pattern="^\s*(datalines|cards)\s*;\s*$">
+ <token type="Keyword"/>
+ <push state="data"/>
+ </rule>
+ </state>
+ <state name="proc-data">
+ <rule pattern="(^|;)\s*(proc \w+|data|run|quit)[\s;]">
+ <token type="KeywordReserved"/>
+ </rule>
+ </state>
+ <state name="string_dquote">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\|\\"|\\\n">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="&">
+ <token type="NameVariable"/>
+ <push state="validvar"/>
+ </rule>
+ <rule pattern="[^$&"\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[$"\\]">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="general">
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="vars-strings"/>
+ </rule>
+ <rule>
+ <include state="special"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ </state>
+ <state name="vars-strings">
+ <rule pattern="&[a-z_]\w{0,31}\.?">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="%[a-z_]\w{0,31}">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="\'">
+ <token type="LiteralString"/>
+ <push state="string_squote"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string_dquote"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule>
+ <include state="proc-data"/>
+ </rule>
+ <rule>
+ <include state="cards-datalines"/>
+ </rule>
+ <rule>
+ <include state="logs"/>
+ </rule>
+ <rule>
+ <include state="general"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="data">
+ <rule pattern="(.|\n)*^\s*;\s*$">
+ <token type="Other"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="logs">
+ <rule pattern="\n?^\s*%?put ">
+ <token type="Keyword"/>
+ <push state="log-messages"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="\b(datalines4|datalines|delimiter|startsas|redirect|lostcard|continue|informat|filename|footnote|catname|options|libname|systask|display|waitsas|missing|replace|delete|window|endsas|update|format|attrib|length|infile|select|return|retain|rename|remove|output|cards4|modify|leave|title|merge|delim|input|cards|abort|where|label|array|error|call|page|stop|keep|file|drop|link|skip|list|goto|put|out|set|by|dm|in|x)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(references|distinct|describe|validate|restrict|cascade|msgtype|message|primary|foreign|delete|update|create|unique|having|modify|insert|select|group|check|table|alter|order|reset|index|where|into|from|view|null|like|drop|add|not|key|and|set|on|in|or|as)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(while|until|then|else|end|if|do)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="%(sysevalf|nrbquote|qsysfunc|qlowcase|compstor|nrquote|display|qupcase|datatyp|qcmpres|unquote|syscall|sysfunc|sysrput|sysprod|syslput|sysexec|lowcase|qsubstr|sysget|length|keydef|global|superq|substr|verify|bquote|cmpres|upcase|window|label|qleft|while|qtrim|quote|nrstr|until|sysrc|input|macro|local|qscan|index|else|scan|mend|eval|trim|then|goto|left|put|let|end|str|do|to|if)\b">
+ <token type="NameBuiltin"/>
+ </rule>
@@ -0,0 +1,362 @@
+<lexer>
+ <config>
+ <name>Sass</name>
+ <alias>sass</alias>
+ <filename>*.sass</filename>
+ <mime_type>text/x-sass</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="import">
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <push state="root"/>
+ </rule>
+ </state>
+ <state name="string-single">
+ <rule pattern="(\\.|#(?=[^\n{])|[^\n'#])+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpolation"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string-double">
+ <rule pattern="(\\.|#(?=[^\n{])|[^\n"#])+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpolation"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="pseudo-class">
+ <rule pattern="[\w-]+">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpolation"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="for">
+ <rule pattern="(from|to|through)">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule>
+ <include state="value"/>
+ </rule>
+ </state>
+ <state name="selector">
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\:">
+ <token type="NameDecorator"/>
+ <push state="pseudo-class"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="NameClass"/>
+ <push state="class"/>
+ </rule>
+ <rule pattern="\#">
+ <token type="NameNamespace"/>
+ <push state="id"/>
+ </rule>
+ <rule pattern="[\w-]+">
+ <token type="NameTag"/>
+ </rule>
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpolation"/>
+ </rule>
+ <rule pattern="&">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="[~^*!&\[\]()<>|+=@:;,./?-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string-double"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <push state="string-single"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <push state="root"/>
+ </rule>
+ </state>
+ <state name="value">
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[!$][\w-]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="url\(">
+ <token type="LiteralStringOther"/>
+ <push state="string-url"/>
+ </rule>
+ <rule pattern="[a-z_-][\w-]*(?=\()">
+ <token type="NameFunction"/>
+ </rule>
@@ -0,0 +1,274 @@
+<lexer>
+ <config>
+ <name>Scala</name>
+ <alias>scala</alias>
+ <filename>*.scala</filename>
+ <mime_type>text/x-scala</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="import">
+ <rule pattern="([\\$_\p{L}](?:[\\$_\p{L}]|[0-9])*(?:(?<=_)[-~\^\*!%&\\<>\|+=:/?@�-�����-����϶҂؆-؈؎-؏۩۽-۾߶৺୰௳-௸௺౿ೱ-ೲ൹༁-༃༓-༗༚-༟༴༶༸྾-࿅࿇-࿏႞-႟፠᎐-᎙᥀᧠-᧿᭡-᭪᭴-᭼⁄⁒⁺-⁼₊-₌℀-℁℃-℆℈-℉℔№-℘℞-℣℥℧℩℮℺-℻⅀-⅄⅊-⅍⅏←-⌨⌫-⑊⒜-ⓩ─-❧➔-⟄⟇-⟥⟰-⦂⦙-⧗⧜-⧻⧾-⭔⳥-⳪⺀-⿻〄〒-〓〠〶-〷〾-〿㆐-㆑㆖-㆟㇀-㇣㈀-㈞㈪-㉐㉠-㉿㊊-㊰㋀-㏿䷀-䷿꒐-꓆꠨-꠫﬩﷽﹢﹤-﹦+<->|~¬¦│-○-�]+)?|\.)+">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="interpstringcommon">
+ <rule pattern="[^"$\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\$\$">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\$[\\$_\p{L}](?:[\\$_\p{L}]|\d)*">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpbrace"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="interptriplestring">
+ <rule pattern=""""(?!")">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="interpstringcommon"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="(class|trait|object)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="[^\S\n]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="@[\\$_\p{L}](?:[\\$_\p{L}]|[0-9])*(?:(?<=_)[-~\^\*!%&\\<>\|+=:/?@�-�����-����϶҂؆-؈؎-؏۩۽-۾߶৺୰௳-௸௺౿ೱ-ೲ൹༁-༃༓-༗༚-༟༴༶༸྾-࿅࿇-࿏႞-႟፠᎐-᎙᥀᧠-᧿᭡-᭪᭴-᭼⁄⁒⁺-⁼₊-₌℀-℁℃-℆℈-℉℔№-℘℞-℣℥℧℩℮℺-℻⅀-⅄⅊-⅍⅏←-⌨⌫-⑊⒜-ⓩ─-❧➔-⟄⟇-⟥⟰-⦂⦙-⧗⧜-⧻⧾-⭔⳥-⳪⺀-⿻〄〒-〓〠〶-〷〾-〿㆐-㆑㆖-㆟㇀-㇣㈀-㈞㈪-㉐㉠-㉿㊊-㊰㋀-㏿䷀-䷿꒐-꓆꠨-꠫﬩﷽﹢﹤-﹦+<->|~¬¦│-○-�]+)?">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="(abstract|ca(?:se|tch)|d(?:ef|o)|e(?:lse|xtends)|f(?:inal(?:ly)?|or(?:Some)?)|i(?:f|mplicit)|lazy|match|new|override|pr(?:ivate|otected)|re(?:quires|turn)|s(?:ealed|uper)|t(?:h(?:is|row)|ry)|va[lr]|w(?:hile|ith)|yield)\b|(<[%:-]|=>|>:|[#=@_⇒←])(\b|(?=\s)|$)">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern=":(?![-~\^\*!%&\\<>\|+=:/?@�-�����-����϶҂؆-؈؎-؏۩۽-۾߶৺୰௳-௸௺౿ೱ-ೲ൹༁-༃༓-༗༚-༟༴༶༸྾-࿅࿇-࿏႞-႟፠᎐-᎙᥀᧠-᧿᭡-᭪᭴-᭼⁄⁒⁺-⁼₊-₌℀-℁℃-℆℈-℉℔№-℘℞-℣℥℧℩℮℺-℻⅀-⅄⅊-⅍⅏←-⌨⌫-⑊⒜-ⓩ─-❧➔-⟄⟇-⟥⟰-⦂⦙-⧗⧜-⧻⧾-⭔⳥-⳪⺀-⿻〄〒-〓〠〶-〷〾-〿㆐-㆑㆖-㆟㇀-㇣㈀-㈞㈪-㉐㉠-㉿㊊-㊰㋀-㏿䷀-䷿꒐-꓆꠨-꠫﬩﷽﹢﹤-﹦+<->|~¬¦│-○-�]+%s)">
+ <token type="Keyword"/>
+ <push state="type"/>
+ </rule>
+ <rule pattern="[\\$_\p{Lu}][\\$_\p{L}](?:[\\$_\p{L}]|[0-9])*(?:(?<=_)[-~\^\*!%&\\<>\|+=:/?@�-�����-����϶҂؆-؈؎-؏۩۽-۾߶৺୰௳-௸௺౿ೱ-ೲ൹༁-༃༓-༗༚-༟༴༶༸྾-࿅࿇-࿏႞-႟፠᎐-᎙᥀᧠-᧿᭡-᭪᭴-᭼⁄⁒⁺-⁼₊-₌℀-℁℃-℆℈-℉℔№-℘℞-℣℥℧℩℮℺-℻⅀-⅄⅊-⅍⅏←-⌨⌫-⑊⒜-ⓩ─-❧➔-⟄⟇-⟥⟰-⦂⦙-⧗⧜-⧻⧾-⭔⳥-⳪⺀-⿻〄〒-〓〠〶-〷〾-〿㆐-㆑㆖-㆟㇀-㇣㈀-㈞㈪-㉐㉠-㉿㊊-㊰㋀-㏿䷀-䷿꒐-꓆꠨-꠫﬩﷽﹢﹤-﹦+<->|~¬¦│-○-�]+)?\b">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(import|package)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern="(type)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="type"/>
+ </rule>
+ <rule pattern="""".*?"""(?!")">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="'[\\$_\p{L}](?:[\\$_\p{L}]|[0-9])*(?:(?<=_)[-~\^\*!%&\\<>\|+=:/?@�-�����-����϶҂؆-؈؎-؏۩۽-۾߶৺୰௳-௸௺౿ೱ-ೲ൹༁-༃༓-༗༚-༟༴༶༸྾-࿅࿇-࿏႞-႟፠᎐-᎙᥀᧠-᧿᭡-᭪᭴-᭼⁄⁒⁺-⁼₊-₌℀-℁℃-℆℈-℉℔№-℘℞-℣℥℧℩℮℺-℻⅀-⅄⅊-⅍⅏←-⌨⌫-⑊⒜-ⓩ─-❧➔-⟄⟇-⟥⟰-⦂⦙-⧗⧜-⧻⧾-⭔⳥-⳪⺀-⿻〄〒-〓〠〶-〷〾-〿㆐-㆑㆖-㆟㇀-㇣㈀-㈞㈪-㉐㉠-㉿㊊-㊰㋀-㏿䷀-䷿꒐-꓆꠨-꠫﬩﷽﹢﹤-﹦+<->|~¬¦│-○-�]+)?">
+ <token type="TextSymbol"/>
+ </rule>
+ <rule pattern="[fs]"""">
+ <token type="LiteralString"/>
+ <push state="interptriplestring"/>
+ </rule>
+ <rule pattern="[fs]"">
+ <token type="LiteralString"/>
+ <push state="interpstring"/>
+ </rule>
+ <rule pattern="raw"(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[\\$_\p{L}](?:[\\$_\p{L}]|[0-9])*(?:(?<=_)[-~\^\*!%&\\<>\|+=:/?@�-�����-����϶҂؆-؈؎-؏۩۽-۾߶৺୰௳-௸௺౿ೱ-ೲ൹༁-༃༓-༗༚-༟༴༶༸྾-࿅࿇-࿏႞-႟፠᎐-᎙᥀᧠-᧿᭡-᭪᭴-᭼⁄⁒⁺-⁼₊-₌℀-℁℃-℆℈-℉℔№-℘℞-℣℥℧℩℮℺-℻⅀-⅄⅊-⅍⅏←-⌨⌫-⑊⒜-ⓩ─-❧➔-⟄⟇-⟥⟰-⦂⦙-⧗⧜-⧻⧾-⭔⳥-⳪⺀-⿻〄〒-〓〠〶-〷〾-〿㆐-㆑㆖-㆟㇀-㇣㈀-㈞㈪-㉐㉠-㉿㊊-㊰㋀-㏿䷀-䷿꒐-꓆꠨-꠫﬩﷽﹢﹤-﹦+<->|~¬¦│-○-�]+)?">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="`[^`]+`">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="Operator"/>
+ <push state="typeparam"/>
+ </rule>
+ <rule pattern="[(){};,.#]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[-~\^\*!%&\\<>\|+=:/?@�-�����-����϶҂؆-؈؎-؏۩۽-۾߶৺୰௳-௸௺౿ೱ-ೲ൹༁-༃༓-༗༚-༟༴༶༸྾-࿅࿇-࿏႞-႟፠᎐-᎙᥀᧠-᧿᭡-᭪᭴-᭼⁄⁒⁺-⁼₊-₌℀-℁℃-℆℈-℉℔№-℘℞-℣℥℧℩℮℺-℻⅀-⅄⅊-⅍⅏←-⌨⌫-⑊⒜-ⓩ─-❧➔-⟄⟇-⟥⟰-⦂⦙-⧗⧜-⧻⧾-⭔⳥-⳪⺀-⿻〄〒-〓〠〶-〷〾-〿㆐-㆑㆖-㆟㇀-㇣㈀-㈞㈪-㉐㉠-㉿㊊-㊰㋀-㏿䷀-䷿꒐-꓆꠨-꠫﬩﷽﹢﹤-﹦+<->|~¬¦│-○-�]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="([0-9][0-9]*\.[0-9]*|\.[0-9]+)([eE][+-]?[0-9]+)?[fFdD]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+L?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="type">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="<[%:]|>:|[#_]|forSome|type">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="([,);}]|=>|=|⇒)(\s*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[({]">
+ <token type="Operator"/>
+ <push/>
+ </rule>
@@ -0,0 +1,106 @@
+<lexer>
+ <config>
+ <name>Scheme</name>
+ <alias>scheme</alias>
+ <alias>scm</alias>
+ <filename>*.scm</filename>
+ <filename>*.ss</filename>
+ <mime_type>text/x-scheme</mime_type>
+ <mime_type>application/x-scheme</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern=";.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="#\|">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comment"/>
+ </rule>
+ <rule pattern="#;\s*\(">
+ <token type="Comment"/>
+ <push state="commented-form"/>
+ </rule>
+ <rule pattern="#!r6rs">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="-?\d+\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="-?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'[\w!$%&*+,/:<=>?@^~|-]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="#\\(alarm|backspace|delete|esc|linefeed|newline|page|return|space|tab|vtab|x[0-9a-zA-Z]{1,5}|.)">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="(#t|#f)">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="('|#|`|,@|,|\.)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(lambda |define |if |else |cond |and |or |case |let |let\* |letrec |begin |do |delay |set\! |\=\> |quote |quasiquote |unquote |unquote\-splicing |define\-syntax |let\-syntax |letrec\-syntax |syntax\-rules )">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(?<='\()[\w!$%&*+,/:<=>?@^~|-]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(?<=#\()[\w!$%&*+,/:<=>?@^~|-]+">
+ <token type="NameVariable"/>
+ </rule>
@@ -0,0 +1,98 @@
+<lexer>
+ <config>
+ <name>Scilab</name>
+ <alias>scilab</alias>
+ <filename>*.sci</filename>
+ <filename>*.sce</filename>
+ <filename>*.tst</filename>
+ <mime_type>text/scilab</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="//.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="^\s*function">
+ <token type="Keyword"/>
+ <push state="deffunc"/>
+ </rule>
+ <rule pattern="(unwind_protect_cleanup|end_unwind_protect|unwind_protect|end_try_catch|endproperties|endclassdef|endfunction|persistent|properties|endmethods|otherwise|endevents|endswitch|__FILE__|continue|classdef|__LINE__|endwhile|function|methods|elseif|return|static|events|global|endfor|switch|until|endif|while|catch|break|case|else|set|end|try|for|get|do|if)\b">
+ <token type="Keyword"/>
+ </rule>
@@ -0,0 +1,373 @@
+<lexer>
+ <config>
+ <name>SCSS</name>
+ <alias>scss</alias>
+ <filename>*.scss</filename>
+ <mime_type>text/x-scss</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <dot_all>true</dot_all>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="string-double">
+ <rule pattern="(\\.|#(?=[^\n{])|[^\n"#])+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpolation"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="id">
+ <rule pattern="[\w-]+">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="#\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interpolation"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="value">
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="!(important|default|global)">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="[!$][\w-]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="url\(">
+ <token type="LiteralStringOther"/>
+ <push state="string-url"/>
+ </rule>
+ <rule pattern="[a-z_-][\w-]*(?=\()">
+ <token type="NameFunction"/>
+ </rule>
@@ -0,0 +1,28 @@
+<lexer>
+ <config>
+ <name>Sed</name>
+ <alias>sed</alias>
+ <alias>gsed</alias>
+ <alias>ssed</alias>
+ <filename>*.sed</filename>
+ <filename>*.[gs]sed</filename>
+ <mime_type>text/x-sed</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="#.*$"><token type="CommentSingle"/></rule>
+ <rule pattern="[0-9]+"><token type="LiteralNumberInteger"/></rule>
+ <rule pattern="\$"><token type="Operator"/></rule>
+ <rule pattern="[{};,!]"><token type="Punctuation"/></rule>
+ <rule pattern="[dDFgGhHlnNpPqQxz=]"><token type="Keyword"/></rule>
+ <rule pattern="([berRtTvwW:])([^;\n]*)"><bygroups><token type="Keyword"/><token type="LiteralStringSingle"/></bygroups></rule>
+ <rule pattern="([aci])((?:.*?\\\n)*(?:.*?[^\\]$))"><bygroups><token type="Keyword"/><token type="LiteralStringDouble"/></bygroups></rule>
+ <rule pattern="([qQ])([0-9]*)"><bygroups><token type="Keyword"/><token type="LiteralNumberInteger"/></bygroups></rule>
+ <rule pattern="(/)((?:(?:\\[^\n]|[^\\])*?\\\n)*?(?:\\.|[^\\])*?)(/)"><bygroups><token type="Punctuation"/><token type="LiteralStringRegex"/><token type="Punctuation"/></bygroups></rule>
+        <rule pattern="(\\(.))((?:(?:\\[^\n]|[^\\])*?\\\n)*?(?:\\.|[^\\])*?)(\2)"><bygroups><token type="Punctuation"/><token type="None"/><token type="LiteralStringRegex"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="(y)(.)((?:(?:\\[^\n]|[^\\])*?\\\n)*?(?:\\.|[^\\])*?)(\2)((?:(?:\\[^\n]|[^\\])*?\\\n)*?(?:\\.|[^\\])*?)(\2)"><bygroups><token type="Keyword"/><token type="Punctuation"/><token type="LiteralStringSingle"/><token type="Punctuation"/><token type="LiteralStringSingle"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="(s)(.)((?:(?:\\[^\n]|[^\\])*?\\\n)*?(?:\\.|[^\\])*?)(\2)((?:(?:\\[^\n]|[^\\])*?\\\n)*?(?:\\.|[^\\])*?)(\2)((?:[gpeIiMm]|[0-9])*)"><bygroups><token type="Keyword"/><token type="Punctuation"/><token type="LiteralStringRegex"/><token type="Punctuation"/><token type="LiteralStringSingle"/><token type="Punctuation"/><token type="Keyword"/></bygroups></rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,61 @@
+<lexer>
+ <config>
+ <name>Sieve</name>
+ <alias>sieve</alias>
+ <filename>*.siv</filename>
+ <filename>*.sieve</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[();,{}\[\]]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(?i)require">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern="(?i)(:)(addresses|all|contains|content|create|copy|comparator|count|days|detail|domain|fcc|flags|from|handle|importance|is|localpart|length|lowerfirst|lower|matches|message|mime|options|over|percent|quotewildcard|raw|regex|specialuse|subject|text|under|upperfirst|upper|value)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|ereject|exists|false|fileinto|if|hasflag|header|keep|notify_method_capability|notify|not|redirect|reject|removeflag|setflag|size|spamtest|stop|string|true|vacation|virustest)">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?i)set">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="([0-9.]+)([kmgKMG])?">
+ <bygroups>
+ <token type="LiteralNumber"/>
+ <token type="LiteralNumber"/>
+ </bygroups>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern=""[^"]*?"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="text:">
+ <token type="NameTag"/>
+ <push state="text"/>
+ </rule>
+ </state>
+ <state name="text">
+ <rule pattern="[^.].*?\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="^\.">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,73 @@
+<!--
+ Generated from https://github.com/pygments/pygments/blob/15f222adefd2bf7835bfd74a12d720028ae68d29/pygments/lexers/dalvik.py.
+-->
+<lexer>
+ <config>
+ <name>Smali</name>
+ <alias>smali</alias>
+ <filename>*.smali</filename>
+ <mime_type>text/smali</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule><include state="comment"/></rule>
+ <rule><include state="label"/></rule>
+ <rule><include state="field"/></rule>
+ <rule><include state="method"/></rule>
+ <rule><include state="class"/></rule>
+ <rule><include state="directive"/></rule>
+ <rule><include state="access-modifier"/></rule>
+ <rule><include state="instruction"/></rule>
+ <rule><include state="literal"/></rule>
+ <rule><include state="punctuation"/></rule>
+ <rule><include state="type"/></rule>
+ <rule><include state="whitespace"/></rule>
+ </state>
+ <state name="directive">
+ <rule pattern="^([ \t]*)(\.(?:class|super|implements|field|subannotation|annotation|enum|method|registers|locals|array-data|packed-switch|sparse-switch|catchall|catch|line|parameter|local|prologue|epilogue|source))"><bygroups><token type="TextWhitespace"/><token type="Keyword"/></bygroups></rule>
+ <rule pattern="^([ \t]*)(\.end)( )(field|subannotation|annotation|method|array-data|packed-switch|sparse-switch|parameter|local)"><bygroups><token type="TextWhitespace"/><token type="Keyword"/><token type="TextWhitespace"/><token type="Keyword"/></bygroups></rule>
+ <rule pattern="^([ \t]*)(\.restart)( )(local)"><bygroups><token type="TextWhitespace"/><token type="Keyword"/><token type="TextWhitespace"/><token type="Keyword"/></bygroups></rule>
+ </state>
+ <state name="access-modifier">
+ <rule pattern="(public|private|protected|static|final|synchronized|bridge|varargs|native|abstract|strictfp|synthetic|constructor|declared-synchronized|interface|enum|annotation|volatile|transient)"><token type="Keyword"/></rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\n"><token type="TextWhitespace"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ </state>
+ <state name="instruction">
+ <rule pattern="\b[vp]\d+\b"><token type="NameBuiltin"/></rule>
+ <rule pattern="(\b[a-z][A-Za-z0-9/-]+)(\s+)"><bygroups><token type="Text"/><token type="TextWhitespace"/></bygroups></rule>
+ </state>
+ <state name="literal">
+ <rule pattern="".*""><token type="LiteralString"/></rule>
+ <rule pattern="0x[0-9A-Fa-f]+t?"><token type="LiteralNumberHex"/></rule>
+ <rule pattern="[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="[0-9]+L?"><token type="LiteralNumberInteger"/></rule>
+ </state>
+ <state name="field">
+ <rule pattern="(\$?\b)([\w$]*)(:)"><bygroups><token type="Punctuation"/><token type="NameVariable"/><token type="Punctuation"/></bygroups></rule>
+ </state>
+ <state name="method">
+ <rule pattern="<(?:cl)?init>"><token type="NameFunction"/></rule>
+ <rule pattern="(\$?\b)([\w$]*)(\()"><bygroups><token type="Punctuation"/><token type="NameFunction"/><token type="Punctuation"/></bygroups></rule>
+ </state>
+ <state name="label">
+ <rule pattern=":\w+"><token type="NameLabel"/></rule>
+ </state>
+ <state name="class">
+ <rule pattern="(L)((?:[\w$]+/)*)([\w$]+)(;)"><bygroups><token type="KeywordType"/><token type="Text"/><token type="NameClass"/><token type="Text"/></bygroups></rule>
+ </state>
+ <state name="punctuation">
+ <rule pattern="->"><token type="Punctuation"/></rule>
+ <rule pattern="[{},():=.-]"><token type="Punctuation"/></rule>
+ </state>
+ <state name="type">
+ <rule pattern="[ZBSCIJFDV\[]+"><token type="KeywordType"/></rule>
+ </state>
+ <state name="comment">
+ <rule pattern="#.*?\n"><token type="Comment"/></rule>
+ </state>
+ </rules>
+</lexer>
+
@@ -0,0 +1,294 @@
+<lexer>
+ <config>
+ <name>Smalltalk</name>
+ <alias>smalltalk</alias>
+ <alias>squeak</alias>
+ <alias>st</alias>
+ <filename>*.st</filename>
+ <mime_type>text/x-smalltalk</mime_type>
+ </config>
+ <rules>
+ <state name="inner_parenth">
+ <rule pattern="\)">
+ <token type="LiteralStringSymbol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="_parenth_helper"/>
+ </rule>
+ </state>
+ <state name="objects">
+ <rule pattern="\[">
+ <token type="Text"/>
+ <push state="blockvariables"/>
+ </rule>
+ <rule pattern="\]">
+ <token type="Text"/>
+ <push state="afterobject"/>
+ </rule>
+ <rule pattern="\b(self|super|true|false|nil|thisContext)\b">
+ <token type="NameBuiltinPseudo"/>
+ <push state="afterobject"/>
+ </rule>
+ <rule pattern="\b[A-Z]\w*(?!:)\b">
+ <token type="NameClass"/>
+ <push state="afterobject"/>
+ </rule>
+ <rule pattern="\b[a-z]\w*(?!:)\b">
+ <token type="NameVariable"/>
+ <push state="afterobject"/>
+ </rule>
+ <rule pattern="#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)">
+ <token type="LiteralStringSymbol"/>
+ <push state="afterobject"/>
+ </rule>
+ <rule>
+ <include state="literals"/>
+ </rule>
+ </state>
+ <state name="afterobject">
+ <rule pattern="! !$">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="whitespaces"/>
+ </rule>
+ <rule pattern="\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)">
+ <token type="NameBuiltin"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\b(new\b(?!:))">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern=":=|_">
+ <token type="Operator"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\b[a-zA-Z]+\w*:">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\b[a-zA-Z]+\w*">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="\w+:?|[-+*/\\~<>=|&!?,@%]+">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[\])}]">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[\[({]">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="literals">
+ <rule pattern="'(''|[^'])*'">
+ <token type="LiteralString"/>
+ <push state="afterobject"/>
+ </rule>
+ <rule pattern="\$.">
+ <token type="LiteralStringChar"/>
+ <push state="afterobject"/>
+ </rule>
+ <rule pattern="#\(">
+ <token type="LiteralStringSymbol"/>
+ <push state="parenth"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Text"/>
+ <push state="afterobject"/>
+ </rule>
+ <rule pattern="(\d+r)?-?\d+(\.\d+)?(e-?\d+)?">
+ <token type="LiteralNumber"/>
+ <push state="afterobject"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="(<)(\w+:)(.*?)(>)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="squeak fileout"/>
+ </rule>
+ <rule>
+ <include state="whitespaces"/>
+ </rule>
+ <rule>
+ <include state="method definition"/>
+ </rule>
+ <rule pattern="(\|)([\w\s]*)(\|)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameVariable"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="objects"/>
+ </rule>
+ <rule pattern="\^|:=|_">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[\]({}.;!]">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="_parenth_helper">
+ <rule>
+ <include state="whitespaces"/>
+ </rule>
+ <rule pattern="(\d+r)?-?\d+(\.\d+)?(e-?\d+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[-+*/\\~<>=|&#!?,@%\w:]+">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ <rule pattern="'(''|[^'])*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\$.">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="#*\(">
+ <token type="LiteralStringSymbol"/>
+ <push state="inner_parenth"/>
+ </rule>
+ </state>
+ <state name="parenth">
+ <rule pattern="\)">
+ <token type="LiteralStringSymbol"/>
+ <push state="root" state="afterobject"/>
+ </rule>
+ <rule>
+ <include state="_parenth_helper"/>
+ </rule>
+ </state>
+ <state name="whitespaces">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=""(""|[^"])*"">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="squeak fileout">
+ <rule pattern="^"(""|[^"])*"!">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="^'(''|[^'])*'!">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="NameClass"/>
+ <token type="Keyword"/>
+ <token type="LiteralString"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="NameClass"/>
+ <token type="Keyword"/>
+ <token type="LiteralString"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\w+)( subclass: )(#\w+)(\s+instanceVariableNames: )(.*?)(\s+classVariableNames: )(.*?)(\s+poolDictionaries: )(.*?)(\s+category: )(.*?)(!)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Keyword"/>
+ <token type="LiteralStringSymbol"/>
+ <token type="Keyword"/>
+ <token type="LiteralString"/>
+ <token type="Keyword"/>
+ <token type="LiteralString"/>
+ <token type="Keyword"/>
+ <token type="LiteralString"/>
+ <token type="Keyword"/>
+ <token type="LiteralString"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Keyword"/>
+ <token type="LiteralString"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(!\n)(\].*)(! !)$">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="! !$">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="method definition">
+ <rule pattern="([a-zA-Z]+\w*:)(\s*)(\w+)">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\b[a-zA-Z]+\w*\b)(\s*)$">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$">
+ <bygroups>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="blockvariables">
+ <rule>
+ <include state="whitespaces"/>
+ </rule>
+ <rule pattern="(:)(\s*)(\w+)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\|">
+ <token type="Operator"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,79 @@
+<lexer>
+ <config>
+ <name>Smarty</name>
+ <alias>smarty</alias>
+ <filename>*.tpl</filename>
+ <mime_type>application/x-smarty</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="[^{]+">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="(\{)(\*.*?\*)(\})">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Comment"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{php\})(.*?)(\{/php\})">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <using lexer="PHP"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{)(/?[a-zA-Z_]\w*)(\s*)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="smarty"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="CommentPreproc"/>
+ <push state="smarty"/>
+ </rule>
+ </state>
+ <state name="smarty">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="CommentPreproc"/>
+ <push/>
+ </rule>
+ <rule pattern="\}">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="#[a-zA-Z_]\w*#">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\$[a-zA-Z_]\w*(\.\w+)*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[~!%^&*()+=|\[\]:;,.<>/?@-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameAttribute"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,58 @@
+
+<lexer>
+ <config>
+ <name>SNBT</name>
+ <alias>snbt</alias>
+ <filename>*.snbt</filename>
+ <mime_type>text/snbt</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\{"><token type="Punctuation"/><push state="compound"/></rule>
+ <rule pattern="[^\{]+"><token type="Text"/></rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ </state>
+ <state name="operators">
+ <rule pattern="[,:;]"><token type="Punctuation"/></rule>
+ </state>
+ <state name="literals">
+ <rule pattern="(true|false)"><token type="KeywordConstant"/></rule>
+ <rule pattern="-?\d+[eE]-?\d+"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="-?\d*\.\d+[fFdD]?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="-?\d+[bBsSlLfFdD]?"><token type="LiteralNumberInteger"/></rule>
+ <rule pattern="""><token type="LiteralStringDouble"/><push state="literals.string_double"/></rule>
+ <rule pattern="'"><token type="LiteralStringSingle"/><push state="literals.string_single"/></rule>
+ </state>
+ <state name="literals.string_double">
+ <rule pattern="\\."><token type="LiteralStringEscape"/></rule>
+ <rule pattern="[^\\"\n]+"><token type="LiteralStringDouble"/></rule>
+ <rule pattern="""><token type="LiteralStringDouble"/><pop depth="1"/></rule>
+ </state>
+ <state name="literals.string_single">
+ <rule pattern="\\."><token type="LiteralStringEscape"/></rule>
+ <rule pattern="[^\\'\n]+"><token type="LiteralStringSingle"/></rule>
+ <rule pattern="'"><token type="LiteralStringSingle"/><pop depth="1"/></rule>
+ </state>
+ <state name="compound">
+ <rule pattern="[A-Z_a-z]+"><token type="NameAttribute"/></rule>
+ <rule><include state="operators"/></rule>
+ <rule><include state="whitespace"/></rule>
+ <rule><include state="literals"/></rule>
+ <rule pattern="\{"><token type="Punctuation"/><push/></rule>
+ <rule pattern="\["><token type="Punctuation"/><push state="list"/></rule>
+ <rule pattern="\}"><token type="Punctuation"/><pop depth="1"/></rule>
+ </state>
+ <state name="list">
+ <rule pattern="[A-Z_a-z]+"><token type="NameAttribute"/></rule>
+ <rule><include state="literals"/></rule>
+ <rule><include state="operators"/></rule>
+ <rule><include state="whitespace"/></rule>
+ <rule pattern="\["><token type="Punctuation"/><push/></rule>
+ <rule pattern="\{"><token type="Punctuation"/><push state="compound"/></rule>
+ <rule pattern="\]"><token type="Punctuation"/><pop depth="1"/></rule>
+ </state>
+ </rules>
+</lexer>
+
@@ -0,0 +1,95 @@
+<lexer>
+ <config>
+ <name>Snobol</name>
+ <alias>snobol</alias>
+ <filename>*.snobol</filename>
+ <mime_type>text/x-snobol</mime_type>
+ </config>
+ <rules>
+ <state name="heredoc">
+ <rule pattern=".*\n">
+ <token type="LiteralStringHeredoc"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\*.*\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="[+.] ">
+ <token type="Punctuation"/>
+ <push state="statement"/>
+ </rule>
+ <rule pattern="-.*\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="END\s*\n">
+ <token type="NameLabel"/>
+ <push state="heredoc"/>
+ </rule>
+ <rule pattern="[A-Za-z$][\w$]*">
+ <token type="NameLabel"/>
+ <push state="statement"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ <push state="statement"/>
+ </rule>
+ </state>
+ <state name="statement">
+ <rule pattern="\s*\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[A-Za-z][\w.]*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\*\*|[?$.!%*/#+\-@|&\\=]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=""[^"]*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'[^']*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[0-9]+(?=[^.EeDd])">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ <push state="goto"/>
+ </rule>
+ <rule pattern="[()<>,;]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="goto">
+ <rule pattern="\s*\n">
+ <token type="Text"/>
+ <pop depth="2"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="F|S">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(\()([A-Za-z][\w.]*)(\))">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameLabel"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,279 @@
+<lexer>
+ <config>
+ <name>Solidity</name>
+ <alias>sol</alias>
+ <alias>solidity</alias>
+ <filename>*.sol</filename>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="strings">
+ <rule pattern="hex'[0-9a-fA-F]+'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="hex"[0-9a-fA-F]+"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <combined state="string-parse-common" state="string-parse-double"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <combined state="string-parse-common" state="string-parse-single"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule>
+ <include state="keywords-types"/>
+ </rule>
+ <rule>
+ <include state="keywords-other"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="\+\+|--|\*\*|\?|:|~|&&|\|\||=>|==?|!=?|(<<|>>>?|[-<>+*%&|^/])=?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(abi|block|msg|tx)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?!abi\.)(decode|encode|encodePacked|encodeWithSelector|encodeWithSignature|encodeWithSelector)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?!block\.)(chainid|coinbase|difficulty|gaslimit|number|timestamp)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?!msg\.)(data|gas|sender|value)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?!tx\.)(gasprice|origin)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(type)(\()([a-zA-Z_]\w*)(\))">
+ <bygroups>
+ <token type="NameBuiltin"/>
+ <token type="Punctuation"/>
+ <token type="NameClass"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?!type\([a-zA-Z_]\w*\)\.)(creationCode|interfaceId|max|min|name|runtimeCode)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(now|this|super|gasleft)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(selfdestruct|suicide)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?!0x[0-9a-fA-F]+\.)(balance|code|codehash|send|transfer)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(assert|revert|require)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(call|callcode|delegatecall)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="selector\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(addmod|blockhash|ecrecover|keccak256|mulmod|ripemd160|sha256|sha3)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="keywords-types">
+ <rule pattern="(address|ufixed|string|bytes|fixed|byte|bool|uint|int)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(int160|int248|int240|int232|int224|int216|int208|int200|int192|int184|int176|int168|int104|int112|int120|int128|int136|int144|int152|int256|int96|int88|int80|int72|int64|int56|int48|int40|int32|int24|int16|int8)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(uint160|uint248|uint240|uint232|uint224|uint216|uint208|uint200|uint192|uint184|uint176|uint168|uint104|uint112|uint120|uint128|uint136|uint144|uint152|uint256|uint96|uint88|uint80|uint72|uint64|uint56|uint48|uint40|uint32|uint24|uint16|uint8)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(bytes23|bytes31|bytes30|bytes29|bytes28|bytes27|bytes26|bytes25|bytes24|bytes10|bytes11|bytes12|bytes13|bytes14|bytes15|bytes16|bytes17|bytes18|bytes19|bytes20|bytes21|bytes22|bytes32|bytes9|bytes8|bytes7|bytes6|bytes5|bytes4|bytes3|bytes2|bytes1)\b">
+ <token type="KeywordType"/>
+ </rule>
@@ -0,0 +1,59 @@
+<lexer>
+ <config>
+ <name>SourcePawn</name>
+ <alias>sp</alias>
+ <filename>*.sp</filename>
+ <filename>*.inc</filename>
+ <mime_type>text/x-sourcepawn</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^#if\s+0"><token type="CommentPreproc"/><push state="if0"/></rule>
+ <rule pattern="^#"><token type="CommentPreproc"/><push state="macro"/></rule>
+ <rule pattern="^\s*(?:/[*].*?[*]/\s*)*#if\s+0"><token type="CommentPreproc"/><push state="if0"/></rule>
+ <rule pattern="^\s*(?:/[*].*?[*]/\s*)*#"><token type="CommentPreproc"/><push state="macro"/></rule>
+ <rule pattern="\n"><token type="Text"/></rule>
+ <rule pattern="\s+"><token type="Text"/></rule>
+ <rule pattern="\\\n"><token type="Text"/></rule>
+ <rule pattern="/(\\\n)?/(\n|(.|\n)*?[^\\]\n)"><token type="CommentSingle"/></rule>
+ <rule pattern="/(\\\n)?\*(.|\n)*?\*(\\\n)?/"><token type="CommentMultiline"/></rule>
+ <rule pattern="[{}]"><token type="Punctuation"/></rule>
+ <rule pattern="L?""><token type="LiteralString"/><push state="string"/></rule>
+ <rule pattern="L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'"><token type="LiteralStringChar"/></rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="0x[0-9a-fA-F]+[LlUu]*"><token type="LiteralNumberHex"/></rule>
+ <rule pattern="0[0-7]+[LlUu]*"><token type="LiteralNumberOct"/></rule>
+ <rule pattern="\d+[LlUu]*"><token type="LiteralNumberInteger"/></rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]"><token type="Operator"/></rule>
+ <rule pattern="[()\[\],.;]"><token type="Punctuation"/></rule>
+ <rule pattern="(case|const|continue|native|default|else|enum|for|if|new|operator|public|return|sizeof|static|decl|struct|switch)\b"><token type="Keyword"/></rule>
+ <rule pattern="(bool|float|void|int|char)\b"><token type="KeywordType"/></rule>
+ <rule pattern="(true|false)\b"><token type="KeywordConstant"/></rule>
+ <rule pattern="[a-zA-Z_]\w*"><token type="Name"/></rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;{]*)(\{)"><bygroups><usingself state="root"/><token type="NameFunction"/><usingself state="root"/><usingself state="root"/><token type="Punctuation"/></bygroups><push state="function"/></rule>
+ <rule pattern="((?:[\w*\s])+?(?:\s|[*]))([a-zA-Z_]\w*)(\s*\([^;]*?\))([^;]*)(;)"><bygroups><usingself state="root"/><token type="NameFunction"/><usingself state="root"/><usingself state="root"/><token type="Punctuation"/></bygroups></rule>
+ </state>
+ <state name="string">
+ <rule pattern="""><token type="LiteralString"/><pop depth="1"/></rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
+ <rule pattern="[^\\"\n]+"><token type="LiteralString"/></rule>
+ <rule pattern="\\\n"><token type="LiteralString"/></rule>
+ <rule pattern="\\"><token type="LiteralString"/></rule>
+ </state>
+ <state name="macro">
+ <rule pattern="(include)(\s*(?:/[*].*?[*]/\s*)?)([^\n]+)"><bygroups><token type="CommentPreproc"/><token type="Text"/><token type="CommentPreprocFile"/></bygroups></rule>
+ <rule pattern="[^/\n]+"><token type="CommentPreproc"/></rule>
+ <rule pattern="/\*(.|\n)*?\*/"><token type="CommentMultiline"/></rule>
+ <rule pattern="//.*?\n"><token type="CommentSingle"/><pop depth="1"/></rule>
+ <rule pattern="/"><token type="CommentPreproc"/></rule>
+ <rule pattern="(?<=\\)\n"><token type="CommentPreproc"/></rule>
+ <rule pattern="\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
+ </state>
+ <state name="if0">
+ <rule pattern="^\s*#if.*?(?<!\\)\n"><token type="CommentPreproc"/><push/></rule>
+ <rule pattern="^\s*#endif.*?(?<!\\)\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
+ <rule pattern=".*?\n"><token type="Comment"/></rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,160 @@
+<lexer>
+ <config>
+ <name>SPARQL</name>
+ <alias>sparql</alias>
+ <filename>*.rq</filename>
+ <filename>*.sparql</filename>
+ <mime_type>application/sparql-query</mime_type>
+ </config>
+ <rules>
+ <state name="string-escape">
+ <rule pattern="u[0-9A-Fa-f]{4}">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="U[0-9A-Fa-f]{8}">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".">
+ <token type="LiteralStringEscape"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="end-of-string">
+ <rule pattern="(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ <pop depth="2"/>
+ </rule>
+ <rule pattern="\^\^">
+ <token type="Operator"/>
+ <pop depth="2"/>
+ </rule>
+ <rule>
+ <pop depth="2"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="((?i)select|construct|describe|ask|where|filter|group\s+by|minus|distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|offset|bindings|load|clear|drop|create|add|move|copy|insert\s+data|delete\s+data|delete\s+where|delete|insert|using\s+named|using|graph|default|named|all|optional|service|silent|bind|union|not\s+in|in|as|having|to|prefix|base)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(a)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(<(?:[^<>"{}|^`\\\x00-\x20])*>)">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="(_:[_\p{L}\p{N}](?:[-_.\p{L}\p{N}]*[-_\p{L}\p{N}])?)">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="[?$][_\p{L}\p{N}]+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="([\p{L}][-_.\p{L}\p{N}]*)?(\:)((?:[_:\p{L}\p{N}]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))(?:(?:[-_:.\p{L}\p{N}]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%]))*(?:[-_:\p{L}\p{N}]|(?:%[0-9A-Fa-f][0-9A-Fa-f])|(?:\\[ _~.\-!$&"()*+,;=/?#@%])))?)?">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="Punctuation"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="((?i)str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|contains|strstarts|strends|strbefore|strafter|year|month|day|hours|minutes|seconds|timezone|tz|now|md5|sha1|sha256|sha384|sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|count|sum|min|max|avg|sample|group_concat|separator)\b">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(true|false)">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="[+\-]?(\d+\.\d*[eE][+-]?\d+|\.?\d+[eE][+-]?\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[+\-]?(\d+\.\d*|\.\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[+\-]?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[(){}.;,:^\[\]]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="#[^\n]*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <push state="triple-double-quoted-string"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="single-double-quoted-string"/>
+ </rule>
+ <rule pattern="'''">
+ <token type="LiteralString"/>
+ <push state="triple-single-quoted-string"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="single-single-quoted-string"/>
+ </rule>
+ </state>
+ <state name="triple-double-quoted-string">
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <push state="end-of-string"/>
+ </rule>
+ <rule pattern="[^\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ <push state="string-escape"/>
+ </rule>
+ </state>
+ <state name="single-double-quoted-string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="end-of-string"/>
+ </rule>
+ <rule pattern="[^"\\\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ <push state="string-escape"/>
+ </rule>
+ </state>
+ <state name="triple-single-quoted-string">
+ <rule pattern="'''">
+ <token type="LiteralString"/>
+ <push state="end-of-string"/>
+ </rule>
+ <rule pattern="[^\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralStringEscape"/>
+ <push state="string-escape"/>
+ </rule>
+ </state>
+ <state name="single-single-quoted-string">
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="end-of-string"/>
+ </rule>
+ <rule pattern="[^'\\\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ <push state="string-escape"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,90 @@
+<lexer>
+ <config>
+ <name>SQL</name>
+ <alias>sql</alias>
+ <filename>*.sql</filename>
+ <mime_type>text/x-sql</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="--.*\n?">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comments"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="double-string"/>
+ </rule>
@@ -0,0 +1,63 @@
+<lexer>
+ <config>
+ <name>SquidConf</name>
+ <alias>squidconf</alias>
+ <alias>squid.conf</alias>
+ <alias>squid</alias>
+ <filename>squid.conf</filename>
+ <mime_type>text/x-squidconf</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="#">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
@@ -0,0 +1,548 @@
+<lexer>
+ <config>
+ <name>Standard ML</name>
+ <alias>sml</alias>
+ <filename>*.sml</filename>
+ <filename>*.sig</filename>
+ <filename>*.fun</filename>
+ <mime_type>text/x-standardml</mime_type>
+ <mime_type>application/x-standardml</mime_type>
+ </config>
+ <rules>
+ <state name="delimiters">
+ <rule pattern="\(|\[|\{">
+ <token type="Punctuation"/>
+ <push state="main"/>
+ </rule>
+ <rule pattern="\)|\]|\}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\b(let|if|local)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="main" state="main"/>
+ </rule>
+ <rule pattern="\b(struct|sig|while)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="main"/>
+ </rule>
+ <rule pattern="\b(do|else|end|in|then)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <push state="main"/>
+ </rule>
+ </state>
+ <state name="breakout">
+ <rule pattern="(?=\b(where|do|handle|if|sig|op|while|case|as|else|signature|andalso|struct|infixr|functor|in|structure|then|local|rec|end|fun|of|orelse|val|include|fn|with|exception|let|and|infix|sharing|datatype|type|abstype|withtype|eqtype|nonfix|raise|open)\b(?!\'))">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="tyvarseq">
+ <rule pattern="\s">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\(\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="\'[\w\']*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="[a-zA-Z][\w']*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\)">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[!%&$#+\-/:<=>?@\\~`^|*]+">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="char">
+ <rule pattern="[^"\\]">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="\\[\\"abtnvfr]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\\^[\x40-\x5e]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\[0-9]{3}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\u[0-9a-fA-F]{4}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\\s+\\">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringChar"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="datbind">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="\b(and)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="#pop" state="dname"/>
+ </rule>
+ <rule pattern="\b(withtype)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="#pop" state="tname"/>
+ </rule>
+ <rule pattern="\b(of)\b(?!\')">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(\|)(\s*)([a-zA-Z][\w']*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\|)(\s+)([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="breakout"/>
+ </rule>
+ <rule>
+ <include state="core"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="Error"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="[^"\\]">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\\[\\"abtnvfr]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\\^[\x40-\x5e]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\[0-9]{3}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\u[0-9a-fA-F]{4}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="\\\s+\\">
+ <token type="LiteralStringInterpol"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="tname">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="breakout"/>
+ </rule>
+ <rule pattern="\'[\w\']*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="tyvarseq"/>
+ </rule>
+ <rule pattern="=(?![!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="Punctuation"/>
+ <push state="#pop" state="typbind"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="Error"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="dname">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="breakout"/>
+ </rule>
+ <rule pattern="\'[\w\']*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="tyvarseq"/>
+ </rule>
+ <rule pattern="(=)(\s*)(datatype)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="KeywordReserved"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="=(?![!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="Punctuation"/>
+ <push state="#pop" state="datbind" state="datcon"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="Error"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="typbind">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="\b(and)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="#pop" state="tname"/>
+ </rule>
+ <rule>
+ <include state="breakout"/>
+ </rule>
+ <rule>
+ <include state="core"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="Error"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="ename">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="(exception|and)\b(\s+)([a-zA-Z][\w']*)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(exception|and)\b(\s*)([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(of)\b(?!\')">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule>
+ <include state="breakout"/>
+ </rule>
+ <rule>
+ <include state="core"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="Error"/>
+ </rule>
+ </state>
+ <state name="vname">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="\'[\w\']*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="tyvarseq"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)(\s*)(=(?![!%&$#+\-/:<=>?@\\~`^|*]+))">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([!%&$#+\-/:<=>?@\\~`^|*]+)(\s*)(=(?![!%&$#+\-/:<=>?@\\~`^|*]+))">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)">
+ <token type="NameVariable"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="NameVariable"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="sname">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="breakout"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="main-fun">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="\s">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\(\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="\b(fun|and)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="fname"/>
+ </rule>
+ <rule pattern="\b(val)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="#pop" state="main" state="vname"/>
+ </rule>
+ <rule pattern="\|">
+ <token type="Punctuation"/>
+ <push state="fname"/>
+ </rule>
+ <rule pattern="\b(case|handle)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="#pop" state="main"/>
+ </rule>
+ <rule>
+ <include state="delimiters"/>
+ </rule>
+ <rule>
+ <include state="core"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="Error"/>
+ </rule>
+ </state>
+ <state name="datcon">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="Error"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="dotted">
+ <rule pattern="([a-zA-Z][\w']*)(\.)">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)">
+ <token type="Name"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="Name"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="Error"/>
+ </rule>
+ </state>
+ <state name="main">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="\b(val|and)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="vname"/>
+ </rule>
+ <rule pattern="\b(fun)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="#pop" state="main-fun" state="fname"/>
+ </rule>
+ <rule>
+ <include state="delimiters"/>
+ </rule>
+ <rule>
+ <include state="core"/>
+ </rule>
+ <rule pattern="\S+">
+ <token type="Error"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^(*)]">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\(\*">
+ <token type="CommentMultiline"/>
+ <push/>
+ </rule>
+ <rule pattern="\*\)">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[(*)]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\(\*">
+ <token type="CommentMultiline"/>
+ <push state="comment"/>
+ </rule>
+ </state>
+ <state name="core">
+ <rule pattern="(_|\}|\{|\)|;|,|\[|\(|\]|\.\.\.)">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="#"">
+ <token type="LiteralStringChar"/>
+ <push state="char"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="~?0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0wx[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0w\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="~?\d+\.\d+[eE]~?\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="~?\d+\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="~?\d+[eE]~?\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="~?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="#\s*[1-9][0-9]*">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="#\s*([a-zA-Z][\w']*)">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="#\s+([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="\b(datatype|abstype)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="dname"/>
+ </rule>
+ <rule pattern="(?=\b(exception)\b(?!\'))">
+ <token type="Text"/>
+ <push state="ename"/>
+ </rule>
+ <rule pattern="\b(functor|include|open|signature|structure)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="sname"/>
+ </rule>
+ <rule pattern="\b(type|eqtype)\b(?!\')">
+ <token type="KeywordReserved"/>
+ <push state="tname"/>
+ </rule>
+ <rule pattern="\'[\w\']*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)(\.)">
+ <token type="NameNamespace"/>
+ <push state="dotted"/>
+ </rule>
+ <rule pattern="\b(abstype|and|andalso|as|case|datatype|do|else|end|exception|fn|fun|handle|if|in|infix|infixr|let|local|nonfix|of|op|open|orelse|raise|rec|then|type|val|with|withtype|while|eqtype|functor|include|sharing|sig|signature|struct|structure|where)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="\b(:|\|,=|=>|->|#|:>)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="fname">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="\'[\w\']*">
+ <token type="NameDecorator"/>
+ </rule>
+ <rule pattern="\(">
+ <token type="Punctuation"/>
+ <push state="tyvarseq"/>
+ </rule>
+ <rule pattern="([a-zA-Z][\w']*)">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="([!%&$#+\-/:<=>?@\\~`^|*]+)">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,85 @@
+<lexer>
+ <config>
+ <name>stas</name>
+ <filename>*.stas</filename>
+ </config>
+ <rules>
+ <state name="string-double-quoted">
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string-single-quoted">
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\']+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="string-char-literal">
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\`]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringChar"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="(\n|\s)+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(?<!\S)(fn|argc|argv|swap|dup|over|over2|rot|rot4|drop|w8|w16|w32|w64|r8|r16|r32|r64|syscall0|syscall1|syscall2|syscall3|syscall4|syscall5|syscall6|_breakpoint|assert|const|auto|reserve|pop|include|addr|if|else|elif|while|break|continue|ret)(?!\S)">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(?<!\S)(\+|\-|\*|\/|\%|\%\%|\+\+|\-\-|>>|<<)(?!\S)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(?<!\S)(\=|\!\=|>|<|>\=|<\=|>s|<s|>\=s|<\=s)(?!\S)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(?<!\S)(\&|\||\^|\~|\!|-\>)(?!\S)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(?<!\S)\-?(\d+)(?!\S)">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="(?<!\S);.*(\S|\n)">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="string-single-quoted"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string-double-quoted"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringChar"/>
+ <push state="string-char-literal"/>
+ </rule>
+ <rule pattern="(?<!\S)[{}](?!\S)">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(?<!\S)[^\s]+(?!\S)">
+ <token type="Name"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,132 @@
+<lexer>
+ <config>
+ <name>Stylus</name>
+ <alias>stylus</alias>
+ <filename>*.styl</filename>
+ <mime_type>text/x-styl</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="values">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(\#[a-f0-9]{3,6})">
+ <token type="LiteralNumberHex"/>
+ </rule>
@@ -0,0 +1,207 @@
+<lexer>
+ <config>
+ <name>Swift</name>
+ <alias>swift</alias>
+ <filename>*.swift</filename>
+ <mime_type>text/x-swift</mime_type>
+ </config>
+ <rules>
+ <state name="comment">
+ <rule pattern=":param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):">
+ <token type="CommentSpecial"/>
+ </rule>
+ </state>
+ <state name="preproc">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule pattern="[A-Za-z]\w*">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="comment-single">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="comment"/>
+ </rule>
+ <rule pattern="[^\n]">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="module">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="\\\(">
+ <token type="LiteralStringInterpol"/>
+ <push state="string-intp"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="string-intp">
+ <rule pattern="\(">
+ <token type="LiteralStringInterpol"/>
+ <push/>
+ </rule>
+ <rule pattern="\)">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//">
+ <token type="CommentSingle"/>
+ <push state="comment-single"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="comment-multi"/>
+ </rule>
+ <rule pattern="#(if|elseif|else|endif|available)\b">
+ <token type="CommentPreproc"/>
+ <push state="preproc"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
@@ -0,0 +1,63 @@
+<lexer>
+ <config>
+ <name>SYSTEMD</name>
+ <alias>systemd</alias>
+ <filename>*.automount</filename>
+ <filename>*.device</filename>
+ <filename>*.dnssd</filename>
+ <filename>*.link</filename>
+ <filename>*.mount</filename>
+ <filename>*.netdev</filename>
+ <filename>*.network</filename>
+ <filename>*.path</filename>
+ <filename>*.scope</filename>
+ <filename>*.service</filename>
+ <filename>*.slice</filename>
+ <filename>*.socket</filename>
+ <filename>*.swap</filename>
+ <filename>*.target</filename>
+ <filename>*.timer</filename>
+ <mime_type>text/plain</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[;#].*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\[.*?\]$">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(.*?)(=)(.*)(\\\n)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Operator"/>
+ <token type="LiteralString"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="continuation"/>
+ </rule>
+ <rule pattern="(.*?)(=)(.*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Operator"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="continuation">
+ <rule pattern="(.*?)(\\\n)">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(.*)">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,181 @@
+<lexer>
+ <config>
+ <name>systemverilog</name>
+ <alias>systemverilog</alias>
+ <alias>sv</alias>
+ <filename>*.sv</filename>
+ <filename>*.svh</filename>
+ <mime_type>text/x-systemverilog</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="macro">
+ <rule pattern="[^/\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="/[*](.|\n)*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(?<=\\)\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="[\w:]+\*?">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^\s*`define">
+ <token type="CommentPreproc"/>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="^(\s*)(package)(\s+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)(import)(\s+)("DPI(?:-C)?")(\s+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ <token type="LiteralString"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)(import)(\s+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="/(\\\n)?/(\n|(.|\n)*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="[{}#@]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="L?"">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="([0-9]+)|(\'h)[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="([0-9]+)|(\'b)[01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="([0-9]+)|(\'d)[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="([0-9]+)|(\'o)[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="\'[01xz]">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+[Ll]?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[()\[\],.;\']">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="`[a-zA-Z_]\w*">
+ <token type="NameConstant"/>
+ </rule>
@@ -0,0 +1,69 @@
+<lexer>
+ <config>
+ <name>TableGen</name>
+ <alias>tablegen</alias>
+ <filename>*.td</filename>
+ <mime_type>text/x-tablegen</mime_type>
+ </config>
+ <rules>
+ <state name="whitespace">
+ <rule pattern="(\n|\s)+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="keyword">
+ <rule pattern="(multiclass|foreach|string|class|field|defm|bits|code|list|def|int|let|dag|bit|in)\b">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="macro"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="c?"[^"]*?"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule>
+ <include state="keyword"/>
+ </rule>
+ <rule pattern="\$[_a-zA-Z][_\w]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\d*[_a-zA-Z][_\w]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\[\{[\w\W]*?\}\]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[+-]?\d+|0x[\da-fA-F]+|0b[01]+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[=<>{}\[\]()*.,!:;]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="macro">
+ <rule pattern="(#include\s+)("[^"]*")">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^\s*#(ifdef|ifndef)\s+[_\w][_\w\d]*">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="^\s*#define\s+[_\w][_\w\d]*">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="^\s*#endif">
+ <token type="CommentPreproc"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,43 @@
+
+<lexer>
+ <config>
+ <name>Tal</name>
+ <alias>tal</alias>
+ <alias>uxntal</alias>
+ <filename>*.tal</filename>
+ <mime_type>text/x-uxntal</mime_type>
+ </config>
+ <rules>
+ <state name="comment">
+ <rule pattern="(?<!\S)\((?!\S)"><token type="CommentMultiline"/><push/></rule>
+ <rule pattern="(?<!\S)\)(?!\S)"><token type="CommentMultiline"/><pop depth="1"/></rule>
+ <rule pattern="[^()]+"><token type="CommentMultiline"/></rule>
+ <rule pattern="[()]+"><token type="CommentMultiline"/></rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="(?<!\S)\((?!\S)"><token type="CommentMultiline"/><push state="comment"/></rule>
+ <rule pattern="(?<!\S)(BRK|LIT|INC|POP|DUP|NIP|SWP|OVR|ROT|EQU|NEQ|GTH|LTH|JMP|JCN|JSR|STH|LDZ|STZ|LDR|STR|LDA|STA|DEI|DEO|ADD|SUB|MUL|DIV|AND|ORA|EOR|SFT)2?k?r?(?!\S)"><token type="KeywordReserved"/></rule>
+ <rule pattern="[][{}](?!\S)"><token type="Punctuation"/></rule>
+ <rule pattern="#([0-9a-f]{2}){1,2}(?!\S)"><token type="LiteralNumberHex"/></rule>
+ <rule pattern=""\S+"><token type="LiteralString"/></rule>
+ <rule pattern="([0-9a-f]{2}){1,2}(?!\S)"><token type="Literal"/></rule>
+ <rule pattern="[|$][0-9a-f]{1,4}(?!\S)"><token type="KeywordDeclaration"/></rule>
+ <rule pattern="%\S+"><token type="NameDecorator"/></rule>
+ <rule pattern="@\S+"><token type="NameFunction"/></rule>
+ <rule pattern="&\S+"><token type="NameLabel"/></rule>
+ <rule pattern="/\S+"><token type="NameTag"/></rule>
+ <rule pattern="\.\S+"><token type="NameVariableMagic"/></rule>
+ <rule pattern=",\S+"><token type="NameVariableInstance"/></rule>
+ <rule pattern=";\S+"><token type="NameVariableGlobal"/></rule>
+ <rule pattern="-\S+"><token type="Literal"/></rule>
+ <rule pattern="_\S+"><token type="Literal"/></rule>
+ <rule pattern="=\S+"><token type="Literal"/></rule>
+ <rule pattern="!\S+"><token type="NameFunction"/></rule>
+ <rule pattern="\?\S+"><token type="NameFunction"/></rule>
+ <rule pattern="~\S+"><token type="KeywordNamespace"/></rule>
+ <rule pattern="\S+"><token type="NameFunction"/></rule>
+ </state>
+ </rules>
+</lexer>
+
@@ -0,0 +1,135 @@
+<lexer>
+ <config>
+ <name>TASM</name>
+ <alias>tasm</alias>
+ <filename>*.asm</filename>
+ <filename>*.ASM</filename>
+ <filename>*.tasm</filename>
+ <mime_type>text/x-tasm</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="preproc">
+ <rule pattern="[^;\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern=";.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="[\n\r]">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\[\n\r]">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";.*">
+ <token type="CommentSingle"/>
+ </rule>
+ </state>
+ <state name="punctuation">
+ <rule pattern="[,():\[\]]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[&|^<>+*=/%~-]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[$]+">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="seg|wrt|strict">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="byte|[dq]?word">
+ <token type="KeywordType"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^\s*%">
+ <token type="CommentPreproc"/>
+ <push state="preproc"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="[@a-z$._?][\w$.?#@~]*:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="BITS|USE16|USE32|SECTION|SEGMENT|ABSOLUTE|EXTERN|GLOBAL|ORG|ALIGN|STRUC|ENDSTRUC|ENDS|COMMON|CPU|GROUP|UPPERCASE|INCLUDE|EXPORT|LIBRARY|MODULE|PROC|ENDP|USES|ARG|DATASEG|UDATASEG|END|IDEAL|P386|MODEL|ASSUME|CODESEG|SIZE">
+ <token type="Keyword"/>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="([@a-z$._?][\w$.?#@~]*)(\s+)(db|dd|dw|T[A-Z][a-z]+)">
+ <bygroups>
+ <token type="NameConstant"/>
+ <token type="KeywordDeclaration"/>
+ <token type="KeywordDeclaration"/>
+ </bygroups>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="(?:res|d)[bwdqt]|times">
+ <token type="KeywordDeclaration"/>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="[@a-z$._?][\w$.?#@~]*">
+ <token type="NameFunction"/>
+ <push state="instruction-args"/>
+ </rule>
+ <rule pattern="[\r\n]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="instruction-args">
+ <rule pattern=""(\\"|[^"\n])*"|'(\\'|[^'\n])*'|`(\\`|[^`\n])*`">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(?:0x[0-9a-f]+|$0[0-9a-f]*|[0-9]+[0-9a-f]*h)">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-7]+q">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="[01]+b">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="[0-9]+\.e?[0-9]+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule>
+ <include state="punctuation"/>
+ </rule>
+ <rule pattern="r[0-9][0-5]?[bwd]|[a-d][lh]|[er]?[a-d]x|[er]?[sb]p|[er]?[sd]i|[c-gs]s|st[0-7]|mm[0-7]|cr[0-4]|dr[0-367]|tr[3-7]">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[@a-z$._?][\w$.?#@~]*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="(\\\s*)(;.*)([\r\n])">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentSingle"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[\r\n]+">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,272 @@
+<lexer>
+ <config>
+ <name>Tcl</name>
+ <alias>tcl</alias>
+ <filename>*.tcl</filename>
+ <filename>*.rvt</filename>
+ <mime_type>text/x-tcl</mime_type>
+ <mime_type>text/x-script.tcl</mime_type>
+ <mime_type>application/x-tcl</mime_type>
+ </config>
+ <rules>
+ <state name="command-in-bracket">
+ <rule pattern="\b(namespace|continue|variable|uplevel|foreach|return|update|elseif|global|rename|switch|upvar|error|vwait|catch|break|unset|array|apply|trace|after|while|then|else|expr|eval|proc|for|set|if)\b">
+ <token type="Keyword"/>
+ <push state="params-in-bracket"/>
+ </rule>
+ <rule pattern="\b(platform::shell|pkg::create|pkg_mkIndex|fconfigure|re_syntax|fileevent|platform|fblocked|lreverse|mathfunc|encoding|registry|lreplace|history|bgerror|llength|lsearch|linsert|lassign|lappend|refchan|unknown|package|lrepeat|msgcat|mathop|format|interp|lrange|string|source|lindex|socket|concat|regsub|regexp|loadTk|memory|binary|append|unload|subst|split|lsort|clock|close|flush|fcopy|chan|glob|time|gets|http|dict|file|puts|tell|join|read|exit|exec|open|list|scan|seek|incr|info|lset|load|dde|pwd|pid|eof|tm|cd)\b">
+ <token type="NameBuiltin"/>
+ <push state="params-in-bracket"/>
+ </rule>
+ <rule pattern="([\w.-]+)">
+ <token type="NameVariable"/>
+ <push state="params-in-bracket"/>
+ </rule>
+ <rule pattern="#">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ </state>
+ <state name="command-in-paren">
+ <rule pattern="\b(namespace|continue|variable|uplevel|foreach|return|update|elseif|global|rename|switch|upvar|error|vwait|catch|break|unset|array|apply|trace|after|while|then|else|expr|eval|proc|for|set|if)\b">
+ <token type="Keyword"/>
+ <push state="params-in-paren"/>
+ </rule>
+ <rule pattern="\b(platform::shell|pkg::create|pkg_mkIndex|fconfigure|re_syntax|fileevent|platform|fblocked|lreverse|mathfunc|encoding|registry|lreplace|history|bgerror|llength|lsearch|linsert|lassign|lappend|refchan|unknown|package|lrepeat|msgcat|mathop|format|interp|lrange|string|source|lindex|socket|concat|regsub|regexp|loadTk|memory|binary|append|unload|subst|split|lsort|clock|close|flush|fcopy|chan|glob|time|gets|http|dict|file|puts|tell|join|read|exit|exec|open|list|scan|seek|incr|info|lset|load|dde|pwd|pid|eof|tm|cd)\b">
+ <token type="NameBuiltin"/>
+ <push state="params-in-paren"/>
+ </rule>
+ <rule pattern="([\w.-]+)">
+ <token type="NameVariable"/>
+ <push state="params-in-paren"/>
+ </rule>
+ <rule pattern="#">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ </state>
+ <state name="command-in-brace">
+ <rule pattern="\b(namespace|continue|variable|uplevel|foreach|return|update|elseif|global|rename|switch|upvar|error|vwait|catch|break|unset|array|apply|trace|after|while|then|else|expr|eval|proc|for|set|if)\b">
+ <token type="Keyword"/>
+ <push state="params-in-brace"/>
+ </rule>
+ <rule pattern="\b(platform::shell|pkg::create|pkg_mkIndex|fconfigure|re_syntax|fileevent|platform|fblocked|lreverse|mathfunc|encoding|registry|lreplace|history|bgerror|llength|lsearch|linsert|lassign|lappend|refchan|unknown|package|lrepeat|msgcat|mathop|format|interp|lrange|string|source|lindex|socket|concat|regsub|regexp|loadTk|memory|binary|append|unload|subst|split|lsort|clock|close|flush|fcopy|chan|glob|time|gets|http|dict|file|puts|tell|join|read|exit|exec|open|list|scan|seek|incr|info|lset|load|dde|pwd|pid|eof|tm|cd)\b">
+ <token type="NameBuiltin"/>
+ <push state="params-in-brace"/>
+ </rule>
+ <rule pattern="([\w.-]+)">
+ <token type="NameVariable"/>
+ <push state="params-in-brace"/>
+ </rule>
+ <rule pattern="#">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ </state>
+ <state name="basic">
+ <rule pattern="\(">
+ <token type="Keyword"/>
+ <push state="paren"/>
+ </rule>
+ <rule pattern="\[">
+ <token type="Keyword"/>
+ <push state="bracket"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Keyword"/>
+ <push state="brace"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(eq|ne|in|ni)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ <state name="params-in-bracket">
+ <rule pattern="\]">
+ <token type="Keyword"/>
+ <push state="#pop" state="#pop"/>
+ </rule>
+ <rule>
+ <include state="params"/>
+ </rule>
+ </state>
+ <state name="data">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="0x[a-fA-F0-9]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="0[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="\d+\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\$([\w.:-]+)">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="([\w.:-]+)">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="command">
+ <rule pattern="\b(namespace|continue|variable|uplevel|foreach|return|update|elseif|global|rename|switch|upvar|error|vwait|catch|break|unset|array|apply|trace|after|while|then|else|expr|eval|proc|for|set|if)\b">
+ <token type="Keyword"/>
+ <push state="params"/>
+ </rule>
+ <rule pattern="\b(platform::shell|pkg::create|pkg_mkIndex|fconfigure|re_syntax|fileevent|platform|fblocked|lreverse|mathfunc|encoding|registry|lreplace|history|bgerror|llength|lsearch|linsert|lassign|lappend|refchan|unknown|package|lrepeat|msgcat|mathop|format|interp|lrange|string|source|lindex|socket|concat|regsub|regexp|loadTk|memory|binary|append|unload|subst|split|lsort|clock|close|flush|fcopy|chan|glob|time|gets|http|dict|file|puts|tell|join|read|exit|exec|open|list|scan|seek|incr|info|lset|load|dde|pwd|pid|eof|tm|cd)\b">
+ <token type="NameBuiltin"/>
+ <push state="params"/>
+ </rule>
+ <rule pattern="([\w.-]+)">
+ <token type="NameVariable"/>
+ <push state="params"/>
+ </rule>
+ <rule pattern="#">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ </state>
+ <state name="params-in-brace">
+ <rule pattern="\}">
+ <token type="Keyword"/>
+ <push state="#pop" state="#pop"/>
+ </rule>
+ <rule>
+ <include state="params"/>
+ </rule>
+ </state>
+ <state name="string-square">
+ <rule pattern="\[">
+ <token type="LiteralStringDouble"/>
+ <push state="string-square"/>
+ </rule>
+ <rule pattern="(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\]">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="bracket">
+ <rule pattern="\]">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="command-in-bracket"/>
+ </rule>
+ <rule>
+ <include state="basic"/>
+ </rule>
+ <rule>
+ <include state="data"/>
+ </rule>
+ </state>
+ <state name="params-in-paren">
+ <rule pattern="\)">
+ <token type="Keyword"/>
+ <push state="#pop" state="#pop"/>
+ </rule>
+ <rule>
+ <include state="params"/>
+ </rule>
+ </state>
+ <state name="paren">
+ <rule pattern="\)">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="command-in-paren"/>
+ </rule>
+ <rule>
+ <include state="basic"/>
+ </rule>
+ <rule>
+ <include state="data"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern=".*[^\\]\n">
+ <token type="Comment"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=".*\\\n">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="command"/>
+ </rule>
+ <rule>
+ <include state="basic"/>
+ </rule>
+ <rule>
+ <include state="data"/>
+ </rule>
+ <rule pattern="\}">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="brace">
+ <rule pattern="\}">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="command-in-brace"/>
+ </rule>
+ <rule>
+ <include state="basic"/>
+ </rule>
+ <rule>
+ <include state="data"/>
+ </rule>
+ </state>
+ <state name="params">
+ <rule pattern=";">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(else|elseif|then)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule>
+ <include state="basic"/>
+ </rule>
+ <rule>
+ <include state="data"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="\[">
+ <token type="LiteralStringDouble"/>
+ <push state="string-square"/>
+ </rule>
+ <rule pattern="(?s)(\\\\|\\[0-7]+|\\.|[^"\\])">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,121 @@
+<lexer>
+ <config>
+ <name>Tcsh</name>
+ <alias>tcsh</alias>
+ <alias>csh</alias>
+ <filename>*.tcsh</filename>
+ <filename>*.csh</filename>
+ <mime_type>application/x-csh</mime_type>
+ </config>
+ <rules>
+ <state name="basic">
+ <rule pattern="\b(if|endif|else|while|then|foreach|case|default|continue|goto|breaksw|end|switch|endsw)\s*\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(alias|alloc|bg|bindkey|break|builtins|bye|caller|cd|chdir|complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|glob|getspath|hashstat|history|hup|inlib|jobs|kill|limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|source|stop|suspend|source|suspend|telltc|time|umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|ver|wait|warp|watchlog|where|which)\s*\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="#.*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\\[\w\W]">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="(\b\w+)(\s*)(=)">
+ <bygroups>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[\[\]{}()=]+">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="<<\s*(\'?)\\?(\w+)[\w\W]+?\2">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="data">
+ <rule pattern="(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[^=\s\[\]{}()$"\'`\\;#]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\d+(?= |\Z)">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\$#?(\w+|.)">
+ <token type="NameVariable"/>
+ </rule>
+ </state>
+ <state name="curly">
+ <rule pattern="\}">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=":-">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="[^}:"\'`$]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="paren">
+ <rule pattern="\)">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="backticks">
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="basic"/>
+ </rule>
+ <rule pattern="\$\(">
+ <token type="Keyword"/>
+ <push state="paren"/>
+ </rule>
+ <rule pattern="\$\{#?">
+ <token type="Keyword"/>
+ <push state="curly"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <push state="backticks"/>
+ </rule>
+ <rule>
+ <include state="data"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,75 @@
+<lexer>
+ <config>
+ <name>Termcap</name>
+ <alias>termcap</alias>
+ <filename>termcap</filename>
+ <filename>termcap.src</filename>
+ </config>
+ <rules>
+ <state name="defs">
+ <rule pattern="\\\n[ \t]*">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n[ \t]*">
+ <token type="Text"/>
+ <pop depth="2"/>
+ </rule>
+ <rule pattern="(#)([0-9]+)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="LiteralNumber"/>
+ </bygroups>
+ </rule>
+ <rule pattern="=">
+ <token type="Operator"/>
+ <push state="data"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[^\s:=#]+">
+ <token type="NameClass"/>
+ </rule>
+ </state>
+ <state name="data">
+ <rule pattern="\\072">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^:\\]+">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Literal"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^#.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="^[^\s#:|]+">
+ <token type="NameTag"/>
+ <push state="names"/>
+ </rule>
+ </state>
+ <state name="names">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern=":">
+ <token type="Punctuation"/>
+ <push state="defs"/>
+ </rule>
+ <rule pattern="\|">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[^:|]+">
+ <token type="NameAttribute"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,84 @@
+<lexer>
+ <config>
+ <name>Terminfo</name>
+ <alias>terminfo</alias>
+ <filename>terminfo</filename>
+ <filename>terminfo.src</filename>
+ </config>
+ <rules>
+ <state name="names">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(,)([ \t]*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="defs"/>
+ </rule>
+ <rule pattern="\|">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[^,|]+">
+ <token type="NameAttribute"/>
+ </rule>
+ </state>
+ <state name="defs">
+ <rule pattern="\n[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="2"/>
+ </rule>
+ <rule pattern="(#)([0-9]+)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="LiteralNumber"/>
+ </bygroups>
+ </rule>
+ <rule pattern="=">
+ <token type="Operator"/>
+ <push state="data"/>
+ </rule>
+ <rule pattern="(,)([ \t]*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^\s,=#]+">
+ <token type="NameClass"/>
+ </rule>
+ </state>
+ <state name="data">
+ <rule pattern="\\[,\\]">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="(,)([ \t]*)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\\,]+">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Literal"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^#.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="^[^\s#,|]+">
+ <token type="NameTag"/>
+ <push state="names"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,140 @@
+<lexer>
+ <config>
+ <name>Terraform</name>
+ <alias>terraform</alias>
+ <alias>tf</alias>
+ <filename>*.tf</filename>
+ <mime_type>application/x-tf</mime_type>
+ <mime_type>application/x-terraform</mime_type>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\\\\"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interp-inside"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="[^"\\\\$]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="interp-inside">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[\[\](),.{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="-?[0-9]+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="=>">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(false|true)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="/(?s)\*(((?!\*/).)*)\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\s*(#|//).*\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="([a-zA-Z]\w*)(\s*)(=(?!>))">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Text"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^\s*(provisioner|variable|resource|provider|module|output|data)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(for|in)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(module|count|data|each|var)">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(parseint|signum|floor|ceil|log|max|min|abs|pow)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(trimsuffix|formatlist|trimprefix|trimspace|regexall|replace|indent|strrev|format|substr|chomp|split|title|regex|lower|upper|trim|join)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[^.](setintersection|coalescelist|setsubtract|setproduct|matchkeys|chunklist|transpose|contains|distinct|coalesce|setunion|reverse|flatten|element|compact|lookup|length|concat|values|zipmap|range|merge|slice|index|list|sort|keys|map)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[^.](base64decode|base64encode|base64gzip|jsondecode|jsonencode|yamldecode|yamlencode|csvdecode|urlencode)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(templatefile|filebase64|fileexists|pathexpand|basename|abspath|fileset|dirname|file)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(formatdate|timestamp|timeadd)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(filebase64sha256|filebase64sha512|base64sha512|base64sha256|filesha256|rsadecrypt|filesha512|filesha1|filemd5|uuidv5|bcrypt|sha256|sha512|sha1|uuid|md5)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(cidrnetmask|cidrsubnet|cidrhost)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(tostring|tonumber|tobool|tolist|tomap|toset|can|try)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="=(?!>)|\+|-|\*|\/|:|!|%|>|<(?!<)|>=|<=|==|!=|&&|\||\?">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\n|\s+|\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[a-zA-Z]\w*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="(?s)(<<-?)(\w+)(\n\s*(?:(?!\2).)*\s*\n\s*)(\2)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Operator"/>
+ <token type="LiteralString"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="declaration">
+ <rule pattern="(\s*)("(?:\\\\|\\"|[^"])*")(\s*)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="NameVariable"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\{">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,113 @@
+<lexer>
+ <config>
+ <name>TeX</name>
+ <alias>tex</alias>
+ <alias>latex</alias>
+ <filename>*.tex</filename>
+ <filename>*.aux</filename>
+ <filename>*.toc</filename>
+ <mime_type>text/x-tex</mime_type>
+ <mime_type>text/x-latex</mime_type>
+ </config>
+ <rules>
+ <state name="displaymath">
+ <rule pattern="\\\]">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\$\$">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule>
+ <include state="math"/>
+ </rule>
+ </state>
+ <state name="command">
+ <rule pattern="\[.*?\]">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="\*">
+ <token type="Keyword"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="general">
+ <rule pattern="%.*?\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="[{}]">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="[&_^]">
+ <token type="NameBuiltin"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\\\[">
+ <token type="LiteralStringBacktick"/>
+ <push state="displaymath"/>
+ </rule>
+ <rule pattern="\\\(">
+ <token type="LiteralString"/>
+ <push state="inlinemath"/>
+ </rule>
+ <rule pattern="\$\$">
+ <token type="LiteralStringBacktick"/>
+ <push state="displaymath"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralString"/>
+ <push state="inlinemath"/>
+ </rule>
+ <rule pattern="\\([a-zA-Z]+|.)">
+ <token type="Keyword"/>
+ <push state="command"/>
+ </rule>
+ <rule pattern="\\$">
+ <token type="Keyword"/>
+ </rule>
+ <rule>
+ <include state="general"/>
+ </rule>
+ <rule pattern="[^\\$%&_^{}]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="math">
+ <rule pattern="\\([a-zA-Z]+|.)">
+ <token type="NameVariable"/>
+ </rule>
+ <rule>
+ <include state="general"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="[-=!+*/()\[\]]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[^=!+*/()\[\]\\$%&_^{}0-9-]+">
+ <token type="NameBuiltin"/>
+ </rule>
+ </state>
+ <state name="inlinemath">
+ <rule pattern="\\\)">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="math"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,154 @@
+<lexer>
+ <config>
+ <name>Thrift</name>
+ <alias>thrift</alias>
+ <filename>*.thrift</filename>
+ <mime_type>application/x-thrift</mime_type>
+ </config>
+ <rules>
+ <state name="class">
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(async|oneway|extends|throws|required|optional)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(const|typedef)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(smalltalk_category|smalltalk_prefix|delphi_namespace|csharp_namespace|ruby_namespace|xsd_namespace|cpp_namespace|php_namespace|xsd_nillable|xsd_optional|java_package|cocoa_prefix|perl_package|cpp_include|py_module|xsd_attrs|cpp_type|xsd_all|include)\b">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern="(double|binary|string|slist|senum|bool|void|byte|list|i64|map|set|i32|i16)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\b(__NAMESPACE__|synchronized|__FUNCTION__|__METHOD__|endforeach|implements|enddeclare|instanceof|transient|endswitch|protected|interface|__CLASS__|continue|__FILE__|abstract|function|endwhile|unsigned|register|volatile|__LINE__|declare|foreach|default|__DIR__|private|finally|dynamic|virtual|lambda|elseif|inline|switch|unless|endfor|delete|import|return|module|ensure|native|rescue|assert|sizeof|static|global|except|public|float|BEGIN|super|endif|yield|elsif|throw|clone|class|catch|until|break|retry|begin|raise|alias|while|print|undef|exec|with|when|case|redo|args|elif|this|then|self|goto|else|pass|next|var|for|xor|END|not|try|del|and|def|new|use|nil|end|if|do|is|or|in|as)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="[+-]?(\d+\.\d+([eE][+-]?\d+)?|\.?\d+[eE][+-]?\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[+-]?0x[0-9A-Fa-f]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[+-]?[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <combined state="stringescape" state="dqs"/>
+ </rule>
+ <rule pattern="\'">
+ <token type="LiteralStringSingle"/>
+ <combined state="stringescape" state="sqs"/>
+ </rule>
+ <rule pattern="(namespace)(\s+)">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ <push state="namespace"/>
+ </rule>
+ <rule pattern="(enum|union|struct|service|exception)(\s+)">
+ <bygroups>
+ <token type="KeywordDeclaration"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ <push state="class"/>
+ </rule>
+ <rule pattern="((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)((?:[^\W\d]|\$)[\w$]*)(\s*)(\()">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="NameFunction"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ <rule pattern="[&=]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[:;,{}()<>\[\]]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[a-zA-Z_](\.\w|\w)*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="dqs">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="namespace">
+ <rule pattern="[a-z*](\.\w|\w)*">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\n">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ </state>
+ <state name="comments">
+ <rule pattern="#.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="/\*[\w\W]*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="stringescape">
+ <rule pattern="\\([\\nrt"\'])">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ </state>
+ <state name="sqs">
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\\\'\n]+">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,44 @@
+<lexer>
+ <config>
+ <name>TOML</name>
+ <alias>toml</alias>
+ <filename>*.toml</filename>
+ <filename>Pipfile</filename>
+ <filename>poetry.lock</filename>
+ <mime_type>text/x-toml</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#.*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(false|true)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="\d\d\d\d-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d\+)?(Z|[+-]\d{2}:\d{2})">
+ <token type="LiteralDate"/>
+ </rule>
+ <rule pattern="[+-]?[0-9](_?\d)*\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[+-]?[0-9](_?\d)*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="[.,=\[\]{}]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[A-Za-z0-9_-]+">
+ <token type="NameOther"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,81 @@
+<lexer>
+ <config>
+ <name>TradingView</name>
+ <alias>tradingview</alias>
+ <alias>tv</alias>
+ <filename>*.tv</filename>
+ <mime_type>text/x-tradingview</mime_type>
+ <dot_all>true</dot_all>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="[^\S\n]+|\n|[()]">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(//.*?)(\n)">
+ <bygroups>
+ <token type="CommentSingle"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern=">=|<=|==|!=|>|<|\?|-|\+|\*|\/|%|\[|\]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[:,.]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="=">
+ <token type="KeywordPseudo"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"\n])*["\n]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'\\.'|'[^\\]'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[0-9](\.[0-9]*)?([eE][+-][0-9]+)?">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="#[a-fA-F0-9]{8}|#[a-fA-F0-9]{6}|#[a-fA-F0-9]{3}">
+ <token type="LiteralStringOther"/>
+ </rule>
@@ -0,0 +1,137 @@
+<lexer>
+ <config>
+ <name>Transact-SQL</name>
+ <alias>tsql</alias>
+ <alias>t-sql</alias>
+ <mime_type>text/x-tsql</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="--(?m).*?$\n?">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="multiline-comments"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralStringSingle"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralStringName"/>
+ <push state="quoted-ident"/>
+ </rule>
+ <rule pattern="(\*=|!=|!>|\^=|<=|<>|\|=|&=|>=|%=|\+=|/=|-=|!<|::|/|-|%|\+|&|>|\||=|\^|<|~|\*)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(intersect|between|except|exists|union|some|like|all|any|not|and|or|in)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="(uniqueidentifier|datetimeoffset|smalldatetime|hierarchyid|sql_variant|smallmoney|varbinary|datetime2|timestamp|datetime|smallint|nvarchar|decimal|tinyint|varchar|numeric|binary|bigint|cursor|image|nchar|money|float|table|ntext|text|time|real|date|char|int|bit|xml)\b">
+ <token type="NameClass"/>
+ </rule>
@@ -0,0 +1,82 @@
+<lexer>
+ <config>
+ <name>Turing</name>
+ <alias>turing</alias>
+ <filename>*.turing</filename>
+ <filename>*.tu</filename>
+ <mime_type>text/x-turing</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="%(.*?)\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="(var|fcn|function|proc|procedure|process|class|end|record|type|begin|case|loop|for|const|union|monitor|module|handler)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(all|asm|assert|bind|bits|body|break|by|cheat|checked|close|condition|decreasing|def|deferred|else|elsif|exit|export|external|flexible|fork|forward|free|get|if|implement|import|include|inherit|init|invariant|label|new|objectclass|of|opaque|open|packed|pause|pervasive|post|pre|priority|put|quit|read|register|result|seek|self|set|signal|skip|tag|tell|then|timeout|to|unchecked|unqualified|wait|when|write)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(true|false)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(addressint|boolean|pointer|string|array|real4|real8|nat1|int8|int4|int2|nat2|nat4|nat8|int1|real|char|enum|nat|int)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+\.\d*([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\.\d+([Ee][-+]\d+)?i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+[Ee][-+]\d+i">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\.\d+([eE][+\-]?\d+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(0|[1-9][0-9]*)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(div|mod|rem|\*\*|=|<|>|>=|<=|not=|not|and|or|xor|=>|in|shl|shr|->|~|~=|~in|&|:=|\.\.|[\^+\-*/&#])">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[()\[\]{}.,:]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[^\W\d]\w*">
+ <token type="NameOther"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,170 @@
+<lexer>
+ <config>
+ <name>Turtle</name>
+ <alias>turtle</alias>
+ <filename>*.ttl</filename>
+ <mime_type>text/turtle</mime_type>
+ <mime_type>application/x-turtle</mime_type>
+ <case_insensitive>true</case_insensitive>
+ <not_multiline>true</not_multiline>
+ </config>
+ <rules>
+ <state name="triple-double-quoted-string">
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <push state="end-of-string"/>
+ </rule>
+ <rule pattern="[^\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ <push state="string-escape"/>
+ </rule>
+ </state>
+ <state name="single-double-quoted-string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="end-of-string"/>
+ </rule>
+ <rule pattern="[^"\\\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ <push state="string-escape"/>
+ </rule>
+ </state>
+ <state name="triple-single-quoted-string">
+ <rule pattern="'''">
+ <token type="LiteralString"/>
+ <push state="end-of-string"/>
+ </rule>
+ <rule pattern="[^\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ <push state="string-escape"/>
+ </rule>
+ </state>
+ <state name="single-single-quoted-string">
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="end-of-string"/>
+ </rule>
+ <rule pattern="[^'\\\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ <push state="string-escape"/>
+ </rule>
+ </state>
+ <state name="string-escape">
+ <rule pattern=".">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="end-of-string">
+ <rule pattern="(@)([a-z]+(:?-[a-z0-9]+)*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="GenericEmph"/>
+ <token type="GenericEmph"/>
+ </bygroups>
+ <pop depth="2"/>
+ </rule>
+ <rule pattern="(\^\^)(<[^<>"{}|^`\\\x00-\x20]*>)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="GenericEmph"/>
+ </bygroups>
+ <pop depth="2"/>
+ </rule>
+ <rule pattern="(\^\^)((?:[a-z][\w-]*)?\:)([a-z][\w-]*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="GenericEmph"/>
+ <token type="GenericEmph"/>
+ </bygroups>
+ <pop depth="2"/>
+ </rule>
+ <rule>
+ <pop depth="2"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="(@base|BASE)(\s+)(<[^<>"{}|^`\\\x00-\x20]*>)(\s*)(\.?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="NameVariable"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(@prefix|PREFIX)(\s+)((?:[a-z][\w-]*)?\:)(\s+)(<[^<>"{}|^`\\\x00-\x20]*>)(\s*)(\.?)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="NameNamespace"/>
+ <token type="TextWhitespace"/>
+ <token type="NameVariable"/>
+ <token type="TextWhitespace"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?<=\s)a(?=\s)">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(<[^<>"{}|^`\\\x00-\x20]*>)">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="((?:[a-z][\w-]*)?\:)([a-z][\w-]*)">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="#[^\n]+">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="\b(true|false)\b">
+ <token type="Literal"/>
+ </rule>
+ <rule pattern="[+\-]?\d*\.\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[+\-]?\d*(:?\.\d+)?E[+\-]?\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="[+\-]?\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[\[\](){}.;,:^]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=""""">
+ <token type="LiteralString"/>
+ <push state="triple-double-quoted-string"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="single-double-quoted-string"/>
+ </rule>
+ <rule pattern="'''">
+ <token type="LiteralString"/>
+ <push state="triple-single-quoted-string"/>
+ </rule>
+ <rule pattern="'">
+ <token type="LiteralString"/>
+ <push state="single-single-quoted-string"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,155 @@
+<lexer>
+ <config>
+ <name>Twig</name>
+ <alias>twig</alias>
+ <filename>*.twig</filename>
+ <mime_type>application/x-twig</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="var">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(-?)(\}\})">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="varnames"/>
+ </rule>
+ </state>
+ <state name="tag">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(-?)(%\})">
+ <bygroups>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="varnames"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="[^{]+">
+ <token type="Other"/>
+ </rule>
+ <rule pattern="\{\{">
+ <token type="CommentPreproc"/>
+ <push state="var"/>
+ </rule>
+ <rule pattern="\{\#.*?\#\}">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(\{%)(-?\s*)(raw)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endraw)(\s*-?)(%\})">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ <token type="Other"/>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{%)(-?\s*)(verbatim)(\s*-?)(%\})(.*?)(\{%)(-?\s*)(endverbatim)(\s*-?)(%\})">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ <token type="Other"/>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="CommentPreproc"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{%)(-?\s*)(filter)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ <push state="tag"/>
+ </rule>
+ <rule pattern="(\{%)(-?\s*)([a-zA-Z_]\w*)">
+ <bygroups>
+ <token type="CommentPreproc"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ <push state="tag"/>
+ </rule>
+ <rule pattern="\{">
+ <token type="Other"/>
+ </rule>
+ </state>
+ <state name="varnames">
+ <rule pattern="(\|)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(is)(\s+)(not)?(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameFunction"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?i)(true|false|none|null)\b">
+ <token type="KeywordPseudo"/>
+ </rule>
+      <rule pattern="(in|not|and|b-and|or|b-or|b-xor|is|if|elseif|else|import|constant|defined|divisibleby|empty|even|iterable|odd|sameas|matches|starts\s+with|ends\s+with)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(loop|block|parent)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\.(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w-]|[^\x00-\x7f])*">
+ <token type="NameVariable"/>
+ </rule>
+ <rule pattern="\.[0-9]+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern=":?"(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern=":?'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="([{}()\[\]+\-*/,:~%]|\.\.|\?|:|\*\*|\/\/|!=|[><=]=?)">
+ <token type="Operator"/>
+ </rule>
+      <rule pattern="[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?">
+ <token type="LiteralNumber"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,295 @@
+<lexer>
+ <config>
+ <name>TypeScript</name>
+ <alias>ts</alias>
+ <alias>tsx</alias>
+ <alias>typescript</alias>
+ <filename>*.ts</filename>
+ <filename>*.tsx</filename>
+ <filename>*.mts</filename>
+ <filename>*.cts</filename>
+ <mime_type>text/x-typescript</mime_type>
+ <dot_all>true</dot_all>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="expression">
+ <rule pattern="{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="jsx">
+ <rule pattern="(<)(/?)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Punctuation"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<)([\w\.]+)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameTag"/>
+ </bygroups>
+ <push state="tag"/>
+ </rule>
+ <rule pattern="(<)(/)([\w\.]*)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Punctuation"/>
+ <token type="NameTag"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="tag">
+ <rule>
+ <include state="jsx"/>
+ </rule>
+ <rule pattern=",">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <push state="interp"/>
+ </rule>
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="([\w-]+\s*)(=)(\s*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="attr"/>
+ </rule>
+ <rule pattern="[{}]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[\w\.]+">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(/?)(\s*)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^-]+">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="-->">
+ <token type="Comment"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="-">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="<!--">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="badregex">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="interp">
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\\`">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interp-inside"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="[^`\\$]+">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ </state>
+ <state name="attr">
+ <rule pattern="{">
+ <token type="Punctuation"/>
+ <push state="expression"/>
+ </rule>
+ <rule pattern="".*?"">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="'.*?'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="interp-inside">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="slashstartsregex">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gim]+\b|\B)">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=/)">
+ <token type="Text"/>
+ <push state="badregex"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="jsx"/>
+ </rule>
+ <rule pattern="^(?=\s|/|<!--)">
+ <token type="Text"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?">
+ <token type="Operator"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(for|in|of|while|do|break|return|yield|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|keyof|asserts|is|infer|await|void|this)\b">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(var|let|with|function)\b">
+ <token type="KeywordDeclaration"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(abstract|async|boolean|class|const|debugger|enum|export|extends|from|get|global|goto|implements|import|interface|namespace|package|private|protected|public|readonly|require|set|static|super|type)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false|null|NaN|Infinity|undefined)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(Array|Boolean|Date|Error|Function|Math|Number|Object|Packages|RegExp|String|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="\b(module)(\s+)("[\w\./@]+")(\s+)">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ <token type="NameOther"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="\b(string|bool|number|any|never|object|symbol|unique|unknown|bigint)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="\b(constructor|declare|interface|as)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(super)(\s*)(\([\w,?.$\s]+\s*\))">
+ <bygroups>
+ <token type="KeywordReserved"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="([a-zA-Z_?.$][\w?.$]*)\(\) \{">
+ <token type="NameOther"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)">
+ <bygroups>
+ <token type="NameOther"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[$a-zA-Z_]\w*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern="[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0x[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <push state="interp"/>
+ </rule>
+ <rule pattern="@\w+">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,178 @@
+<lexer>
+ <config>
+ <name>TypoScript</name>
+ <alias>typoscript</alias>
+ <filename>*.ts</filename>
+ <mime_type>text/x-typoscript</mime_type>
+ <dot_all>true</dot_all>
+ <priority>0.1</priority>
+ </config>
+ <rules>
+ <state name="whitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="html">
+ <rule pattern="<\S[^\n>]*>">
+ <using lexer="TypoScriptHTMLData"/>
+ </rule>
+ <rule pattern="&[^;\n]*;">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(_CSS_DEFAULT_STYLE)(\s*)(\()(?s)(.*(?=\n\)))">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="LiteralStringSymbol"/>
+ <using lexer="TypoScriptCSSData"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="operator">
+ <rule pattern="[<>,:=.*%+|]">
+ <token type="Operator"/>
+ </rule>
+ </state>
+ <state name="structure">
+ <rule pattern="[{}()\[\]\\]">
+ <token type="LiteralStringSymbol"/>
+ </rule>
+ </state>
+ <state name="constant">
+ <rule pattern="(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ <token type="Operator"/>
+ <token type="NameConstant"/>
+ <token type="NameConstant"/>
+ <token type="LiteralStringSymbol"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ <token type="NameConstant"/>
+ <token type="Operator"/>
+ <token type="NameConstant"/>
+ <token type="LiteralStringSymbol"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(#[a-fA-F0-9]{6}\b|#[a-fA-F0-9]{3}\b)">
+ <token type="LiteralStringChar"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="(?<!(#|\'|"))(?:#(?!(?:[a-fA-F0-9]{6}|[a-fA-F0-9]{3}))[^\n#]+|//[^\n]*)">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="/\*(?:(?!\*/).)*\*/">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(\s*#\s*\n)">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="comment"/>
+ </rule>
+ <rule>
+ <include state="constant"/>
+ </rule>
+ <rule>
+ <include state="html"/>
+ </rule>
+ <rule>
+ <include state="label"/>
+ </rule>
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="punctuation"/>
+ </rule>
+ <rule>
+ <include state="operator"/>
+ </rule>
+ <rule>
+ <include state="structure"/>
+ </rule>
+ <rule>
+ <include state="literal"/>
+ </rule>
+ <rule>
+ <include state="other"/>
+ </rule>
+ </state>
+ <state name="literal">
+ <rule pattern="0x[0-9A-Fa-f]+t?">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(###\w+###)">
+ <token type="NameConstant"/>
+ </rule>
+ </state>
+ <state name="label">
+ <rule pattern="(EXT|FILE|LLL):[^}\n"]*">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(?![^\w\-])([\w\-]+(?:/[\w\-]+)+/?)(\S*\n)">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="punctuation">
+ <rule pattern="[,.]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ <state name="other">
+ <rule pattern="[\w"\-!/&;]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(\[)(?i)(browser|compatVersion|dayofmonth|dayofweek|dayofyear|device|ELSE|END|GLOBAL|globalString|globalVar|hostname|hour|IP|language|loginUser|loginuser|minute|month|page|PIDinRootline|PIDupinRootline|system|treeLevel|useragent|userFunc|usergroup|version)([^\]]*)(\])">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ <token type="NameConstant"/>
+ <token type="Text"/>
+ <token type="LiteralStringSymbol"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?=[\w\-])(HTMLparser|HTMLparser_tags|addParams|cache|encapsLines|filelink|if|imageLinkWrap|imgResource|makelinks|numRows|numberFormat|parseFunc|replacement|round|select|split|stdWrap|strPad|tableStyle|tags|textStyle|typolink)(?![\w\-])">
+ <token type="NameFunction"/>
+ </rule>
+ <rule pattern="(?:(=?\s*<?\s+|^\s*))(cObj|field|config|content|constants|FEData|file|frameset|includeLibs|lib|page|plugin|register|resources|sitemap|sitetitle|styles|temp|tt_[^:.\s]*|types|xmlnews|INCLUDE_TYPOSCRIPT|_CSS_DEFAULT_STYLE|_DEFAULT_PI_VARS|_LOCAL_LANG)(?![\w\-])">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="NameBuiltin"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(?=[\w\-])(CASE|CLEARGIF|COA|COA_INT|COBJ_ARRAY|COLUMNS|CONTENT|CTABLE|EDITPANEL|FILE|FILES|FLUIDTEMPLATE|FORM|HMENU|HRULER|HTML|IMAGE|IMGTEXT|IMG_RESOURCE|LOAD_REGISTER|MEDIA|MULTIMEDIA|OTABLE|PAGE|QTOBJECT|RECORDS|RESTORE_REGISTER|SEARCHRESULT|SVG|SWFOBJECT|TEMPLATE|TEXT|USER|USER_INT)(?![\w\-])">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(?=[\w\-])(ACTIFSUBRO|ACTIFSUB|ACTRO|ACT|CURIFSUBRO|CURIFSUB|CURRO|CUR|IFSUBRO|IFSUB|NO|SPC|USERDEF1RO|USERDEF1|USERDEF2RO|USERDEF2|USRRO|USR)">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(?=[\w\-])(GMENU_FOLDOUT|GMENU_LAYERS|GMENU|IMGMENUITEM|IMGMENU|JSMENUITEM|JSMENU|TMENUITEM|TMENU_LAYERS|TMENU)">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(?=[\w\-])(PHP_SCRIPT(_EXT|_INT)?)">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(?=[\w\-])(userFunc)(?![\w\-])">
+ <token type="NameFunction"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,52 @@
+<lexer>
+ <config>
+ <name>TypoScriptCssData</name>
+ <alias>typoscriptcssdata</alias>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(.*)(###\w+###)(.*)">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="NameConstant"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ <token type="Operator"/>
+ <token type="NameConstant"/>
+ <token type="NameConstant"/>
+ <token type="LiteralStringSymbol"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(.*)(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})(.*)">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="LiteralStringSymbol"/>
+ <token type="NameConstant"/>
+ <token type="Operator"/>
+ <token type="NameConstant"/>
+ <token type="LiteralStringSymbol"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="/\*(?:(?!\*/).)*\*/">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(?<!(#|\'|"))(?:#(?!(?:[a-fA-F0-9]{6}|[a-fA-F0-9]{3}))[^\n#]+|//[^\n]*)">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="[<>,:=.*%+|]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[\w"\-!/&;(){}]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,52 @@
+<lexer>
+ <config>
+ <name>TypoScriptHtmlData</name>
+ <alias>typoscripthtmldata</alias>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(INCLUDE_TYPOSCRIPT)">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(EXT|FILE|LLL):[^}\n"]*">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(.*)(###\w+###)(.*)">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="NameConstant"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(\{)(\$)((?:[\w\-]+\.)*)([\w\-]+)(\})">
+ <bygroups>
+ <token type="LiteralStringSymbol"/>
+ <token type="Operator"/>
+ <token type="NameConstant"/>
+ <token type="NameConstant"/>
+ <token type="LiteralStringSymbol"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(.*)(\{)([\w\-]+)(\s*:\s*)([\w\-]+)(\})(.*)">
+ <bygroups>
+ <token type="LiteralString"/>
+ <token type="LiteralStringSymbol"/>
+ <token type="NameConstant"/>
+ <token type="Operator"/>
+ <token type="NameConstant"/>
+ <token type="LiteralStringSymbol"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[<>,:=.*%+|]">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[\w"\-!/&;(){}#]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,108 @@
+
+<lexer>
+ <config>
+ <name>Typst</name>
+ <alias>typst</alias>
+ <filename>*.typ</filename>
+ <mime_type>text/x-typst</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule><include state="markup"/></rule>
+ </state>
+ <state name="into_code">
+ <rule pattern="(\#let|\#set|\#show)\b"><token type="KeywordDeclaration"/><push state="inline_code"/></rule>
+ <rule pattern="(\#import|\#include)\b"><token type="KeywordNamespace"/><push state="inline_code"/></rule>
+ <rule pattern="(\#if|\#for|\#while|\#export)\b"><token type="KeywordReserved"/><push state="inline_code"/></rule>
+ <rule pattern="#\{"><token type="Punctuation"/><push state="code"/></rule>
+ <rule pattern="#\("><token type="Punctuation"/><push state="code"/></rule>
+ <rule pattern="(#[a-zA-Z_][a-zA-Z0-9_-]*)(\[)"><bygroups><token type="NameFunction"/><token type="Punctuation"/></bygroups><push state="markup"/></rule>
+ <rule pattern="(#[a-zA-Z_][a-zA-Z0-9_-]*)(\()"><bygroups><token type="NameFunction"/><token type="Punctuation"/></bygroups><push state="code"/></rule>
+ <rule pattern="(\#true|\#false|\#none|\#auto)\b"><token type="KeywordConstant"/></rule>
+ <rule pattern="#[a-zA-Z_][a-zA-Z0-9_]*"><token type="NameVariable"/></rule>
+ <rule pattern="#0x[0-9a-fA-F]+"><token type="LiteralNumberHex"/></rule>
+ <rule pattern="#0b[01]+"><token type="LiteralNumberBin"/></rule>
+ <rule pattern="#0o[0-7]+"><token type="LiteralNumberOct"/></rule>
+ <rule pattern="#[0-9]+[\.e][0-9]+"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="#[0-9]+"><token type="LiteralNumberInteger"/></rule>
+ </state>
+ <state name="markup">
+ <rule><include state="comment"/></rule>
+ <rule pattern="^\s*=+.*$"><token type="GenericHeading"/></rule>
+ <rule pattern="[*][^*]*[*]"><token type="GenericStrong"/></rule>
+ <rule pattern="_[^_]*_"><token type="GenericEmph"/></rule>
+ <rule pattern="\$"><token type="Punctuation"/><push state="math"/></rule>
+ <rule pattern="`[^`]*`"><token type="LiteralStringBacktick"/></rule>
+ <rule pattern="^(\s*)(-)(\s+)"><bygroups><token type="TextWhitespace"/><token type="Punctuation"/><token type="TextWhitespace"/></bygroups></rule>
+ <rule pattern="^(\s*)(\+)(\s+)"><bygroups><token type="TextWhitespace"/><token type="Punctuation"/><token type="TextWhitespace"/></bygroups></rule>
+ <rule pattern="^(\s*)([0-9]+\.)"><bygroups><token type="TextWhitespace"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="^(\s*)(/)(\s+)([^:]+)(:)"><bygroups><token type="TextWhitespace"/><token type="Punctuation"/><token type="TextWhitespace"/><token type="NameVariable"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="<[a-zA-Z_][a-zA-Z0-9_-]*>"><token type="NameLabel"/></rule>
+ <rule pattern="@[a-zA-Z_][a-zA-Z0-9_-]*"><token type="NameLabel"/></rule>
+ <rule pattern="\\#"><token type="Text"/></rule>
+ <rule><include state="into_code"/></rule>
+ <rule pattern="```(?:.|\n)*?```"><token type="LiteralStringBacktick"/></rule>
+ <rule pattern="https?://[0-9a-zA-Z~/%#&=\',;.+?]*"><token type="GenericEmph"/></rule>
+ <rule pattern="(\-\-\-|\\|\~|\-\-|\.\.\.)\B"><token type="Punctuation"/></rule>
+ <rule pattern="\\\["><token type="Punctuation"/></rule>
+ <rule pattern="\\\]"><token type="Punctuation"/></rule>
+ <rule pattern="\["><token type="Punctuation"/><push/></rule>
+ <rule pattern="\]"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern="[ \t]+\n?|\n"><token type="TextWhitespace"/></rule>
+ <rule pattern="((?![*_$`<@\\#\] ]|https?://).)+"><token type="Text"/></rule>
+ </state>
+ <state name="math">
+ <rule><include state="comment"/></rule>
+ <rule pattern="(\\_|\\\^|\\\&)"><token type="Text"/></rule>
+ <rule pattern="(_|\^|\&|;)"><token type="Punctuation"/></rule>
+ <rule pattern="(\+|/|=|\[\||\|\]|\|\||\*|:=|::=|\.\.\.|'|\-|=:|!=|>>|>=|>>>|<<|<=|<<<|\->|\|\->|=>|\|=>|==>|\-\->|\~\~>|\~>|>\->|\->>|<\-|<==|<\-\-|<\~\~|<\~|<\-<|<<\-|<\->|<=>|<==>|<\-\->|>|<|\~|:|\|)"><token type="Operator"/></rule>
+ <rule pattern="\\"><token type="Punctuation"/></rule>
+ <rule pattern="\\\$"><token type="Punctuation"/></rule>
+ <rule pattern="\$"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule><include state="into_code"/></rule>
+ <rule pattern="([a-zA-Z][a-zA-Z0-9-]*)(\s*)(\()"><bygroups><token type="NameFunction"/><token type="TextWhitespace"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="([a-zA-Z][a-zA-Z0-9-]*)(:)"><bygroups><token type="NameVariable"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="([a-zA-Z][a-zA-Z0-9-]*)"><token type="NameVariable"/></rule>
+ <rule pattern="[0-9]+(\.[0-9]+)?"><token type="LiteralNumber"/></rule>
+ <rule pattern="\.{1,3}|\(|\)|,|\{|\}"><token type="Punctuation"/></rule>
+ <rule pattern=""[^"]*""><token type="LiteralStringDouble"/></rule>
+ <rule pattern="[ \t\n]+"><token type="TextWhitespace"/></rule>
+ </state>
+ <state name="comment">
+ <rule pattern="//.*$"><token type="CommentSingle"/></rule>
+ <rule pattern="/[*](.|\n)*?[*]/"><token type="CommentMultiline"/></rule>
+ </state>
+ <state name="code">
+ <rule><include state="comment"/></rule>
+ <rule pattern="\["><token type="Punctuation"/><push state="markup"/></rule>
+ <rule pattern="\(|\{"><token type="Punctuation"/><push state="code"/></rule>
+ <rule pattern="\)|\}"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern=""[^"]*""><token type="LiteralStringDouble"/></rule>
+ <rule pattern=",|\.{1,2}"><token type="Punctuation"/></rule>
+ <rule pattern="="><token type="Operator"/></rule>
+ <rule pattern="(and|or|not)\b"><token type="OperatorWord"/></rule>
+ <rule pattern="=>|<=|==|!=|>|<|-=|\+=|\*=|/=|\+|-|\\|\*"><token type="Operator"/></rule>
+ <rule pattern="([a-zA-Z_][a-zA-Z0-9_-]*)(:)"><bygroups><token type="NameVariable"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="([a-zA-Z_][a-zA-Z0-9_-]*)(\()"><bygroups><token type="NameFunction"/><token type="Punctuation"/></bygroups><push state="code"/></rule>
+ <rule pattern="(as|break|export|continue|else|for|if|in|return|while)\b"><token type="KeywordReserved"/></rule>
+ <rule pattern="(import|include)\b"><token type="KeywordNamespace"/></rule>
+ <rule pattern="(auto|none|true|false)\b"><token type="KeywordConstant"/></rule>
+ <rule pattern="([0-9.]+)(mm|pt|cm|in|em|fr|%)"><bygroups><token type="LiteralNumber"/><token type="KeywordReserved"/></bygroups></rule>
+ <rule pattern="0x[0-9a-fA-F]+"><token type="LiteralNumberHex"/></rule>
+ <rule pattern="0b[01]+"><token type="LiteralNumberBin"/></rule>
+ <rule pattern="0o[0-7]+"><token type="LiteralNumberOct"/></rule>
+ <rule pattern="[0-9]+[\.e][0-9]+"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="[0-9]+"><token type="LiteralNumberInteger"/></rule>
+ <rule pattern="(let|set|show)\b"><token type="KeywordDeclaration"/></rule>
+ <rule pattern="([a-zA-Z_][a-zA-Z0-9_-]*)"><token type="NameVariable"/></rule>
+ <rule pattern="[ \t\n]+"><token type="TextWhitespace"/></rule>
+ <rule pattern=":"><token type="Punctuation"/></rule>
+ </state>
+ <state name="inline_code">
+ <rule pattern=";\b"><token type="Punctuation"/><pop depth="1"/></rule>
+ <rule pattern="\n"><token type="TextWhitespace"/><pop depth="1"/></rule>
+ <rule><include state="code"/></rule>
+ </state>
+ </rules>
+</lexer>
+
@@ -0,0 +1,147 @@
+<lexer>
+ <config>
+ <name>ucode</name>
+ <filename>*.uc</filename>
+ <mime_type>application/x.ucode</mime_type>
+ <mime_type>text/x.ucode</mime_type>
+ <dot_all>true</dot_all>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="interp">
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\\`">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\\[^`\\]">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interp-inside"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="[^`\\$]+">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ </state>
+ <state name="interp-inside">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="<!--">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ <state name="slashstartsregex">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gimuy]+\b|\B)">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=/)">
+ <token type="Text"/>
+ <push state="#pop" state="badregex"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="badregex">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="\A#! ?/.*?\n">
+ <token type="CommentHashbang"/>
+ </rule>
+ <rule pattern="^(?=\s|/|<!--)">
+ <token type="Text"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="\d+(\.\d*|[eE][+\-]?\d+)">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[bB][01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[oO][0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9][0-9_]*">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\.\.\.|=>">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?">
+ <token type="Operator"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(import|export|from|as|for|in|while|break|return|continue|switch|case|default|if|else|try|catch|delete|this)\b">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(const|let|function)\b">
+ <token type="KeywordDeclaration"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(true|false|null|NaN|Infinity)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(?:[$_\p{L}\p{N}]|\\u[a-fA-F0-9]{4})(?:(?:[$_\p{L}\p{N}]|\\u[a-fA-F0-9]{4}))*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <push state="interp"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,355 @@
+<lexer>
+ <config>
+ <name>V</name>
+ <alias>v</alias>
+ <alias>vlang</alias>
+ <filename>*.v</filename>
+ <filename>*.vv</filename>
+ <filename>v.mod</filename>
+ <mime_type>text/x-v</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(?<=module\s+\w[^\n]*\s+)(//[^\n]+\n)+(?=\n)">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ <rule pattern="(// *)(\w+)([^\n]+\n)(?=(?://[^\n]*\n)* *(?:pub +)?(?:fn|struct|union|type|interface|enum|const) +\2\b)">
+ <bygroups>
+ <token type="LiteralStringDoc"/>
+ <token type="GenericEmph"/>
+ <token type="LiteralStringDoc"/>
+ </bygroups>
+ <push state="string-doc"/>
+ </rule>
+ <rule pattern="//[^\n]*\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*(?:(?:/\*(?:.|\n)*?\*/)*|.|\n)*\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="\b(import|module)\b">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern="\b(fn|struct|union|map|chan|type|interface|enum|const|mut|shared|pub|__global)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="\?">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(?<=\)\s*)!">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="[ \t]*#include[^\n]+">
+ <using lexer="c"/>
+ </rule>
+ <rule pattern="[ \t]*#\w[^\n]*">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(sql)(\s+)(\w+)(\s+)({)([^}]*?)(})">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Name"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <using lexer="sql"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\$(?=\w)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(?<=\$)(?:embed_file|pkgconfig|tmpl|env|compile_error|compile_warn)">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(asm)(\s+)(\w+)(\s*)({)([^}]*?)(})">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="KeywordType"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ <using lexer="nasm"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b_(?:un)?likely_(?=\()">
+ <token type="NameFunctionMagic"/>
+ </rule>
+ <rule pattern="(?<=\$if.+?(?:&&|\|\|)?)((no_segfault_handler|no_bounds_checking|little_endian|freestanding|no_backtrace|big_endian|cplusplus|dragonfly|prealloc|android|windows|no_main|solaris|darwin|clang|tinyc|glibc|mingw|haiku|macos|amd64|arm64|debug|linux|prod|msvc|test|hpux|mach|x32|x64|gcc|qnx|gnu|ios|mac|js))+">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="@(VMOD_FILE|VEXEROOT|VMODROOT|METHOD|STRUCT|COLUMN|VHASH|FILE|LINE|VEXE|MOD|FN)\b">
+ <token type="NameVariableMagic"/>
+ </rule>
+ <rule pattern="\b(?<!@)(__offsetof|isreftype|continue|volatile|typeof|static|unsafe|return|assert|sizeof|atomic|select|match|break|defer|rlock|lock|else|goto|for|in|is|as|or|if|go)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(?<!@)(none|true|false|si_s_code|si_g32_code|si_g64_code)\b">
+ <token type="KeywordConstant"/>
+ </rule>
@@ -0,0 +1,365 @@
+<lexer>
+ <config>
+ <name>V shell</name>
+ <alias>vsh</alias>
+ <alias>vshell</alias>
+ <filename>*.vsh</filename>
+ <mime_type>text/x-vsh</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="attribute">
+ <rule pattern="\]">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="'">
+ <token type="Punctuation"/>
+ <push state="string-single"/>
+ </rule>
+ <rule pattern=""">
+ <token type="Punctuation"/>
+ <push state="string-double"/>
+ </rule>
+ <rule pattern="[;:]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(?<=\[)if\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(?<=: *)\w+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="[^\W\d]\w*">
+ <token type="NameAttribute"/>
+ </rule>
+ </state>
+ <state name="string-double">
+ <rule pattern=""">
+ <token type="LiteralStringDouble"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="char-escape"/>
+ </rule>
+ <rule pattern="(\$)((?!\\){)">
+ <bygroups>
+ <token type="Operator"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <push state="string-curly-interpolation"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="Operator"/>
+ <push state="string-interpolation"/>
+ </rule>
+ <rule pattern="[^"]+?">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ </state>
+ <state name="char">
+ <rule pattern="`">
+ <token type="LiteralStringChar"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="char-escape"/>
+ </rule>
+ <rule pattern="[^\\]">
+ <token type="LiteralStringChar"/>
+ </rule>
+ </state>
+ <state name="string-doc">
+ <rule pattern="(// *)(#+ [^\n]+)(\n)">
+ <bygroups>
+ <token type="LiteralStringDoc"/>
+ <token type="GenericHeading"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="// *([=_*~-])\1{2,}\n">
+ <token type="LiteralStringDelimiter"/>
+ </rule>
+ <rule pattern="//[^\n]*\n">
+ <token type="LiteralStringDoc"/>
+ </rule>
+ <rule>
+ <mutators>
+ <pop depth="1"/>
+ </mutators>
+ </rule>
+ </state>
+ <state name="string-interpolation">
+ <rule pattern="(\.)?(@)?(?:([^\W\d]\w*)(\()([^)]*)(\))|([^\W\d]\w*))">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Operator"/>
+ <token type="NameFunction"/>
+ <token type="Punctuation"/>
+ <usingself state="root"/>
+ <token type="Punctuation"/>
+ <token type="NameVariable"/>
+ </bygroups>
+ </rule>
+ <rule>
+ <mutators>
+ <pop depth="1"/>
+ </mutators>
+ </rule>
+ </state>
+ <state name="string-curly-interpolation">
+ <rule pattern="}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="strings"/>
+ </rule>
+ <rule pattern="(:)( *?)([ 0'#+-])?(?:(\.)?([0-9]+))?([fFgeEGxXobsd])?">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Punctuation"/>
+ <token type="LiteralNumber"/>
+ <token type="LiteralStringAffix"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^}"':]+">
+ <usingself state="root"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^#![^\n]*\n">
+ <token type="CommentHashbang"/>
+ </rule>
+ <rule pattern="\b(path_delimiter|path_separator|wd_at_startup|max_path_len|sys_write|sys_close|sys_mkdir|sys_creat|sys_open|s_iflnk|s_irusr|s_ifdir|s_ixoth|s_isuid|s_isgid|s_isvtx|s_iwoth|s_iwusr|s_ixusr|s_irgrp|s_iwgrp|s_ixgrp|s_iroth|s_ifmt|args)\b">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="\b(ExecutableNotFoundError|FileNotOpenedError|SizeOfTypeIs0Error|ProcessState|SeekMode|Command|Process|Signal|Result|Uname|File)\b">
+ <token type="NameBuiltin"/>
+ </rule>
@@ -0,0 +1,72 @@
+
+<lexer>
+ <config>
+ <name>Vala</name>
+ <alias>vala</alias>
+ <alias>vapi</alias>
+ <filename>*.vala</filename>
+ <filename>*.vapi</filename>
+ <mime_type>text/x-vala</mime_type>
+ </config>
+ <rules>
+ <state name="whitespace">
+ <rule pattern="^\s*#if\s+0"><token type="CommentPreproc"/><push state="if0"/></rule>
+ <rule pattern="\n"><token type="TextWhitespace"/></rule>
+ <rule pattern="\s+"><token type="TextWhitespace"/></rule>
+ <rule pattern="\\\n"><token type="Text"/></rule>
+ <rule pattern="//(\n|(.|\n)*?[^\\]\n)"><token type="CommentSingle"/></rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/"><token type="CommentMultiline"/></rule>
+ </state>
+ <state name="statements">
+ <rule pattern="[L@]?""><token type="LiteralString"/><push state="string"/></rule>
+ <rule pattern="L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'"><token type="LiteralStringChar"/></rule>
+ <rule pattern="(?s)""".*?""""><token type="LiteralString"/></rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?"><token type="LiteralNumberFloat"/></rule>
+ <rule pattern="0x[0-9a-fA-F]+[Ll]?"><token type="LiteralNumberHex"/></rule>
+ <rule pattern="0[0-7]+[Ll]?"><token type="LiteralNumberOct"/></rule>
+ <rule pattern="\d+[Ll]?"><token type="LiteralNumberInteger"/></rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]"><token type="Operator"/></rule>
+ <rule pattern="(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])"><bygroups><token type="Punctuation"/><token type="NameDecorator"/><token type="Punctuation"/></bygroups></rule>
+ <rule pattern="(\[)(CCode|(?:Integer|Floating)Type)"><bygroups><token type="Punctuation"/><token type="NameDecorator"/></bygroups></rule>
+ <rule pattern="[()\[\],.]"><token type="Punctuation"/></rule>
+ <rule pattern="(as|base|break|case|catch|construct|continue|default|delete|do|else|enum|finally|for|foreach|get|if|in|is|lock|new|out|params|return|set|sizeof|switch|this|throw|try|typeof|while|yield)\b"><token type="Keyword"/></rule>
+ <rule pattern="(abstract|const|delegate|dynamic|ensures|extern|inline|internal|override|owned|private|protected|public|ref|requires|signal|static|throws|unowned|var|virtual|volatile|weak|yields)\b"><token type="KeywordDeclaration"/></rule>
+ <rule pattern="(namespace|using)(\s+)"><bygroups><token type="KeywordNamespace"/><token type="TextWhitespace"/></bygroups><push state="namespace"/></rule>
+ <rule pattern="(class|errordomain|interface|struct)(\s+)"><bygroups><token type="KeywordDeclaration"/><token type="TextWhitespace"/></bygroups><push state="class"/></rule>
+ <rule pattern="(\.)([a-zA-Z_]\w*)"><bygroups><token type="Operator"/><token type="NameAttribute"/></bygroups></rule>
+ <rule pattern="(void|bool|char|double|float|int|int8|int16|int32|int64|long|short|size_t|ssize_t|string|time_t|uchar|uint|uint8|uint16|uint32|uint64|ulong|unichar|ushort)\b"><token type="KeywordType"/></rule>
+ <rule pattern="(true|false|null)\b"><token type="NameBuiltin"/></rule>
+ <rule pattern="[a-zA-Z_]\w*"><token type="Name"/></rule>
+ </state>
+ <state name="root">
+ <rule><include state="whitespace"/></rule>
+ <rule><push state="statement"/></rule>
+ </state>
+ <state name="statement">
+ <rule><include state="whitespace"/></rule>
+ <rule><include state="statements"/></rule>
+ <rule pattern="[{}]"><token type="Punctuation"/></rule>
+ <rule pattern=";"><token type="Punctuation"/><pop depth="1"/></rule>
+ </state>
+ <state name="string">
+ <rule pattern="""><token type="LiteralString"/><pop depth="1"/></rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})"><token type="LiteralStringEscape"/></rule>
+ <rule pattern="[^\\"\n]+"><token type="LiteralString"/></rule>
+ <rule pattern="\\\n"><token type="LiteralString"/></rule>
+ <rule pattern="\\"><token type="LiteralString"/></rule>
+ </state>
+ <state name="if0">
+ <rule pattern="^\s*#if.*?(?<!\\)\n"><token type="CommentPreproc"/><push/></rule>
+ <rule pattern="^\s*#el(?:se|if).*\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
+ <rule pattern="^\s*#endif.*?(?<!\\)\n"><token type="CommentPreproc"/><pop depth="1"/></rule>
+ <rule pattern=".*?\n"><token type="Comment"/></rule>
+ </state>
+ <state name="class">
+ <rule pattern="[a-zA-Z_]\w*"><token type="NameClass"/><pop depth="1"/></rule>
+ </state>
+ <state name="namespace">
+ <rule pattern="[a-zA-Z_][\w.]*"><token type="NameNamespace"/><pop depth="1"/></rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,162 @@
+<lexer>
+ <config>
+ <name>VB.net</name>
+ <alias>vb.net</alias>
+ <alias>vbnet</alias>
+ <filename>*.vb</filename>
+ <filename>*.bas</filename>
+ <mime_type>text/x-vbnet</mime_type>
+ <mime_type>text/x-vba</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="dim">
+ <rule pattern="[_\w][\w]*">
+ <token type="NameVariable"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="funcname">
+ <rule pattern="[_\w][\w]*">
+ <token type="NameFunction"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="classname">
+ <rule pattern="[_\w][\w]*">
+ <token type="NameClass"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="namespace">
+ <rule pattern="[_\w][\w]*">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="\.">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="end">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(Function|Sub|Property|Class|Structure|Enum|Module|Namespace)\b">
+ <token type="Keyword"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern="^\s*<.*?>">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="rem\b.*?\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="'.*?\n">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="#If\s.*?\sThen|#ElseIf\s.*?\sThen|#Else|#End\s+If|#Const|#ExternalSource.*?\n|#End\s+ExternalSource|#Region.*?\n|#End\s+Region|#ExternalChecksum">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="[(){}!#,.:]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="Option\s+(Strict|Explicit|Compare)\s+(On|Off|Binary|Text)">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(?<!\.)(NotOverridable|NotInheritable|RemoveHandler|MustOverride|Overridable|MustInherit|Implements|RaiseEvent|AddHandler|ParamArray|WithEvents|DirectCast|Overrides|Overloads|Protected|WriteOnly|Interface|Narrowing|Inherits|Widening|SyncLock|ReadOnly|Operator|Continue|Delegate|Optional|MyClass|Declare|CUShort|Handles|Default|Shadows|TryCast|Finally|Private|Nothing|Partial|CSByte|Select|Option|Return|Friend|Resume|ElseIf|MyBase|Shared|Single|Public|CShort|Static|Global|Catch|CType|Error|CUInt|Using|While|GoSub|False|CDate|Throw|Event|CChar|CULng|CBool|Erase|ByVal|ByRef|Alias|EndIf|CByte|ReDim|Stop|Call|Wend|Next|CLng|Loop|True|CDec|With|Then|GoTo|CObj|CSng|Exit|CStr|Else|Each|Case|CInt|Step|When|CDbl|Set|For|Let|Lib|Try|New|Not|Get|On|To|Do|If|Of|Me)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(?<!\.)End\b">
+ <token type="Keyword"/>
+ <push state="end"/>
+ </rule>
+ <rule pattern="(?<!\.)(Dim|Const)\b">
+ <token type="Keyword"/>
+ <push state="dim"/>
+ </rule>
+ <rule pattern="(?<!\.)(Function|Sub|Property)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="funcname"/>
+ </rule>
+ <rule pattern="(?<!\.)(Class|Structure|Enum)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="classname"/>
+ </rule>
+ <rule pattern="(?<!\.)(Module|Namespace|Imports)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="namespace"/>
+ </rule>
+ <rule pattern="(?<!\.)(Boolean|Byte|Char|Date|Decimal|Double|Integer|Long|Object|SByte|Short|Single|String|Variant|UInteger|ULong|UShort)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="(?<!\.)(AddressOf|And|AndAlso|As|GetType|In|Is|IsNot|Like|Mod|Or|OrElse|TypeOf|Xor)\b">
+ <token type="OperatorWord"/>
+ </rule>
+ <rule pattern="&=|[*]=|/=|\\=|\^=|\+=|-=|<<=|>>=|<<|>>|:=|<=|>=|<>|[-&*/\\^+=<>\[\]]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="_\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[_\w][\w]*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="#.*?#">
+ <token type="LiteralDate"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\d*\.\d+)(F[+-]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="\d+([SILDFR]|US|UI|UL)?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="&H[0-9a-f]+([SILDFR]|US|UI|UL)?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="&O[0-7]+([SILDFR]|US|UI|UL)?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern="""">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern=""C?">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^"]+">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,158 @@
+<lexer>
+ <config>
+ <name>verilog</name>
+ <alias>verilog</alias>
+ <alias>v</alias>
+ <filename>*.v</filename>
+ <mime_type>text/x-verilog</mime_type>
+ <ensure_nl>true</ensure_nl>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^\s*`define">
+ <token type="CommentPreproc"/>
+ <push state="macro"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="/(\\\n)?/(\n|(.|\n)*?[^\\]\n)">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/(\\\n)?[*](.|\n)*?[*](\\\n)?/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="[{}#@]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="L?"">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+[fF])[fF]?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="([0-9]+)?(\'h)[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="([0-9]+)?(\'b)[01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="([0-9]+)?(\'d)[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="([0-9]+)?(\'o)[0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="\'[01xz]">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\d+[Ll]?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="Error"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[()\[\],.;\']">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="`[a-zA-Z_]\w*">
+ <token type="NameConstant"/>
+ </rule>
+ <rule pattern="^(\s*)(package)(\s+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^(\s*)(import)(\s+)">
+ <bygroups>
+ <token type="Text"/>
+ <token type="KeywordNamespace"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="import"/>
+ </rule>
+ <rule pattern="(endprimitive|always_latch|macromodule|always_comb|endgenerate|endfunction|endpackage|endspecify|localparam|parameter|primitive|always_ff|automatic|specparam|endmodule|rtranif1|scalared|continue|deassign|endtable|defparam|function|strength|generate|pulldown|vectored|rtranif0|unsigned|specify|endcase|negedge|strong0|disable|default|endtask|posedge|strong1|typedef|tranif1|integer|forever|release|initial|tranif0|highz0|genvar|highz1|pullup|notif0|bufif1|bufif0|repeat|medium|return|struct|assign|signed|module|packed|string|output|notif1|always|final|casex|while|table|const|large|break|begin|input|pull0|pull1|inout|weak1|rcmos|weak0|casez|force|small|rnmos|rpmos|rtran|event|type|void|enum|wait|fork|join|else|edge|pmos|nand|cmos|nmos|task|xnor|case|tran|buf|ref|end|var|and|xor|for|nor|not|do|if|or)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="`(autoexpand_vectornets|nounconnected_drive|noexpand_vectornets|noremove_gatenames|unconnected_drive|noremove_netnames|expand_vectornets|remove_gatenames|default_nettype|remove_netnames|endcelldefine|noaccelerate|endprotected|accelerate|celldefine|endprotect|protected|timescale|resetall|protect|include|ifndef|ifdef|endif|elsif|undef|else)\b">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\$(shortrealtobits|bitstoshortreal|printtimescale|showvariables|countdrivers|reset_value|reset_count|getpattern|showscopes|realtobits|bitstoreal|monitoroff|timeformat|sreadmemh|monitoron|sreadmemb|fmonitor|showvars|fdisplay|realtime|readmemb|readmemh|monitor|history|fstrobe|display|restart|incsave|strobe|fwrite|finish|random|fclose|stime|nokey|fopen|floor|nolog|scale|scope|input|reset|write|rtoi|bits|list|stop|itor|time|save|key|log)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(shortreal|shortint|realtime|longint|integer|supply0|supply1|triand|trireg|uwire|logic|trior|byte|wand|tri0|tri1|time|real|wire|wor|reg|bit|int|tri)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*:(?!:)">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="\$?[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="string">
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern="[^\\"\n]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\">
+ <token type="LiteralString"/>
+ </rule>
+ </state>
+ <state name="macro">
+ <rule pattern="[^/\n]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="/[*](.|\n)*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="/">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="(?<=\\)\n">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="CommentPreproc"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="import">
+ <rule pattern="[\w:]+\*?">
+ <token type="NameNamespace"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,171 @@
+<lexer>
+ <config>
+ <name>VHDL</name>
+ <alias>vhdl</alias>
+ <filename>*.vhdl</filename>
+ <filename>*.vhd</filename>
+ <mime_type>text/x-vhdl</mime_type>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="--.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="'(U|X|0|1|Z|W|L|H|-)'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="[~!%^&*+=|?:<>/-]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="'[a-z_]\w*">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="[()\[\],.;\']">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=""[^\n\\"]*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(library)(\s+)([a-z_]\w*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(use)(\s+)(entity)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(use)(\s+)([a-z_][\w.]*\.)(all)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(use)(\s+)([a-z_][\w.]*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(std|ieee)(\.[a-z_]\w*)">
+ <bygroups>
+ <token type="NameNamespace"/>
+ <token type="NameNamespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(ieee|work|std)\b">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="(entity|component)(\s+)([a-z_]\w*)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(architecture|configuration)(\s+)([a-z_]\w*)(\s+)(of)(\s+)([a-z_]\w*)(\s+)(is)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="NameClass"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([a-z_]\w*)(:)(\s+)(process|for)">
+ <bygroups>
+ <token type="NameClass"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="Keyword"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(end)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="endblock"/>
+ </rule>
+ <rule>
+ <include state="types"/>
+ </rule>
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule>
+ <include state="numbers"/>
+ </rule>
+ <rule pattern="[a-z_]\w*">
+ <token type="Name"/>
+ </rule>
+ </state>
+ <state name="endblock">
+ <rule>
+ <include state="keywords"/>
+ </rule>
+ <rule pattern="[a-z_]\w*">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(\s+)">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=";">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="types">
+ <rule pattern="(std_ulogic_vector|file_open_status|std_logic_vector|severity_level|file_open_kind|delay_length|std_ulogic|bit_vector|character|std_logic|positive|unsigned|boolean|natural|integer|signed|string|time|bit)\b">
+ <token type="KeywordType"/>
+ </rule>
+ </state>
+ <state name="keywords">
+ <rule pattern="(configuration|architecture|disconnect|attribute|transport|postponed|procedure|component|function|variable|severity|constant|generate|register|inertial|package|library|guarded|linkage|generic|subtype|process|literal|record|entity|others|shared|signal|downto|access|assert|return|reject|buffer|impure|select|elsif|inout|until|label|range|group|units|begin|array|alias|after|block|while|null|next|file|when|wait|open|nand|exit|then|case|port|type|loop|else|pure|with|xnor|body|not|rem|bus|rol|ror|xor|abs|end|and|sla|sll|sra|srl|all|out|nor|mod|map|for|new|use|or|on|of|in|if|is|to)\b">
+ <token type="Keyword"/>
+ </rule>
+ </state>
+ <state name="numbers">
+ <rule pattern="\d{1,2}#[0-9a-f_]+#?">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(\d+\.\d*|\.\d+|\d+)E[+-]?\d+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="X"[0-9a-f_]+"">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="O"[0-7_]+"">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="B"[01_]+"">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,48 @@
+<lexer>
+ <config>
+ <name>VHS</name>
+ <alias>vhs</alias>
+ <alias>tape</alias>
+ <alias>cassette</alias>
+ <filename>*.tape</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="(Output)(\s+)(.*)(\s+)">
+ <bygroups>
+ <token type="Keyword"/>
+ <token type="TextWhitespace"/>
+ <token type="LiteralString"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="\b(Set|Type|Left|Right|Up|Down|Backspace|Enter|Tab|Space|Ctrl|Sleep|Hide|Show|Escape)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(FontFamily|FontSize|Framerate|Height|Width|Theme|Padding|TypingSpeed|PlaybackSpeed|LineHeight|LetterSpacing)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="#.*(\S|$)">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="(?s)".*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="(?s)'.*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="(@|\+)">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="(ms|s)">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,85 @@
+<lexer>
+ <config>
+ <name>VimL</name>
+ <alias>vim</alias>
+ <filename>*.vim</filename>
+ <filename>.vimrc</filename>
+ <filename>.exrc</filename>
+ <filename>.gvimrc</filename>
+ <filename>_vimrc</filename>
+ <filename>_exrc</filename>
+ <filename>_gvimrc</filename>
+ <filename>vimrc</filename>
+ <filename>gvimrc</filename>
+ <mime_type>text/x-vim</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="^([ \t:]*)(py(?:t(?:h(?:o(?:n)?)?)?)?)([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ <token type="Text"/>
+ <using lexer="Python"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^([ \t:]*)(py(?:t(?:h(?:o(?:n)?)?)?)?)([ \t])(.*)">
+ <bygroups>
+ <usingself state="root"/>
+ <token type="Keyword"/>
+ <token type="Text"/>
+ <using lexer="Python"/>
+ </bygroups>
+ </rule>
+ <rule pattern="^\s*".*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="[ \t]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="/(\\\\|\\/|[^\n/])*/">
+ <token type="LiteralStringRegex"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^\n"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(''|[^\n'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="(?<=\s)"[^\-:.%#=*].*">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="-?\d+">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="#[0-9a-f]{6}">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="^:">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[()<>+=!|,~-]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\b(let|if|else|endif|elseif|fun|function|endfunction|set|map|autocmd|filetype|hi(ghlight)?|execute|syntax|colorscheme)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="\b(NONE|bold|italic|underline|dark|light)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="\b\w+\b">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,307 @@
+<lexer>
+ <config>
+ <name>vue</name>
+ <alias>vue</alias>
+ <alias>vuejs</alias>
+ <filename>*.vue</filename>
+ <mime_type>text/x-vue</mime_type>
+ <mime_type>application/x-vue</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="interp-inside">
+ <rule pattern="\}">
+ <token type="LiteralStringInterpol"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="attr">
+ <rule pattern="{">
+ <token type="Punctuation"/>
+ <push state="expression"/>
+ </rule>
+ <rule pattern="".*?"">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="'.*?'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="interp">
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="\\\\">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\\`">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="\$\{">
+ <token type="LiteralStringInterpol"/>
+ <push state="interp-inside"/>
+ </rule>
+ <rule pattern="\$">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ <rule pattern="[^`\\$]+">
+ <token type="LiteralStringBacktick"/>
+ </rule>
+ </state>
+ <state name="tag">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="(-)([\w]+)">
+ <token type="NameTag"/>
+ </rule>
+ <rule pattern="(@[\w]+)(="[\S]+")(>)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(@[\w]+)(="[\S]+")">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(@[\S]+)(="[\S]+")">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(:[\S]+)(=)("[\S]+")">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="Operator"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(:)">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="(v-b-[\S]+)">
+ <token type="NameTag"/>
+ </rule>
+ <rule pattern="(v-[\w]+)(=".+)([:][\w]+)(="[\w]+")(>)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="LiteralString"/>
+ <token type="NameTag"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(v-[\w]+)(=)("[\S ]+")(>|\s)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="Operator"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(v-[\w]+)(>)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(v-[\w]+)(=".+")(>)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="LiteralString"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<)([\w]+)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameTag"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(<)(/)([\w]+)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Punctuation"/>
+ <token type="NameTag"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([\w]+\s*)(=)(\s*)">
+ <bygroups>
+ <token type="NameAttribute"/>
+ <token type="Operator"/>
+ <token type="Text"/>
+ </bygroups>
+ <push state="attr"/>
+ </rule>
+ <rule pattern="[{}]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="[\w\.]+">
+ <token type="NameAttribute"/>
+ </rule>
+ <rule pattern="(/?)(\s*)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="slashstartsregex">
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/([gimuy]+\b|\B)">
+ <token type="LiteralStringRegex"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="(?=/)">
+ <token type="Text"/>
+ <push state="#pop" state="badregex"/>
+ </rule>
+ <rule>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule>
+ <include state="vue"/>
+ </rule>
+ <rule pattern="\A#! ?/.*?\n">
+ <token type="CommentHashbang"/>
+ </rule>
+ <rule pattern="^(?=\s|/|<!--)">
+ <token type="Text"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule>
+ <include state="commentsandwhitespace"/>
+ </rule>
+ <rule pattern="(\.\d+|[0-9]+\.[0-9]*)([eE][-+]?[0-9]+)?">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="0[bB][01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[oO][0-7]+">
+ <token type="LiteralNumberOct"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="\.\.\.|=>">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?">
+ <token type="Operator"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[{(\[;,]">
+ <token type="Punctuation"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="[})\].]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|yield|this|of)\b">
+ <token type="Keyword"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(var|let|with|function)\b">
+ <token type="KeywordDeclaration"/>
+ <push state="slashstartsregex"/>
+ </rule>
+ <rule pattern="(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="(true|false|null|NaN|Infinity|undefined)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|Promise|Proxy|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|isSafeInteger|parseFloat|parseInt|document|this|window)\b">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(?:[$_\p{L}\p{N}]|\\u[a-fA-F0-9]{4})(?:(?:[$\p{L}\p{N}]|\\u[a-fA-F0-9]{4}))*">
+ <token type="NameOther"/>
+ </rule>
+ <rule pattern=""(\\\\|\\"|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(\\\\|\\'|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="`">
+ <token type="LiteralStringBacktick"/>
+ <push state="interp"/>
+ </rule>
+ </state>
+ <state name="badregex">
+ <rule pattern="\n">
+ <token type="Text"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="vue">
+ <rule pattern="(<)([\w-]+)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="NameTag"/>
+ </bygroups>
+ <push state="tag"/>
+ </rule>
+ <rule pattern="(<)(/)([\w-]+)(>)">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="Punctuation"/>
+ <token type="NameTag"/>
+ <token type="Punctuation"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="expression">
+ <rule pattern="{">
+ <token type="Punctuation"/>
+ <push/>
+ </rule>
+ <rule pattern="}">
+ <token type="Punctuation"/>
+ <pop depth="1"/>
+ </rule>
+ <rule>
+ <include state="root"/>
+ </rule>
+ </state>
+ <state name="commentsandwhitespace">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="<!--">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*.*?\*/">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,43 @@
+<lexer>
+ <config>
+ <name>WDTE</name>
+ <filename>*.wdte</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#(.*?)\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="-?[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="-?[0-9]*\.[0-9]+">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern=""[^"]*"">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="'[^']*'">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="(default|switch|memo)\b">
+ <token type="KeywordReserved"/>
+ </rule>
+ <rule pattern="{|}|;|->|=>|\(|\)|\[|\]|\.">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[^{};()[\].\s]+">
+ <token type="NameVariable"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,142 @@
+<lexer>
+ <config>
+ <name>WebGPU Shading Language</name>
+ <alias>wgsl</alias>
+ <filename>*.wgsl</filename>
+ <mime_type>text/wgsl</mime_type>
+ </config>
+ <rules>
+ <state name="blankspace">
+ <rule pattern="[\u0020\u0009\u000a\u000b\u000c\u000d\u0085\u200e\u200f\u2028\u2029]+">
+ <token type="TextWhitespace"/>
+ </rule>
+ </state>
+ <state name="comments">
+ <rule pattern="//[^\u000a\u000b\u000c\u000d\u0085\u2028\u2029]*\u000d\u000a">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="//[^\u000a\u000b\u000c\u000d\u0085\u2028\u2029]*[\u000a\u000b\u000c\u000d\u0085\u2028\u2029]">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="block_comment"/>
+ </rule>
+ </state>
+ <state name="attribute">
+ <rule>
+ <include state="blankspace"/>
+ </rule>
+ <rule>
+ <include state="comments"/>
+ </rule>
@@ -0,0 +1,283 @@
+<lexer>
+ <config>
+ <name>WebVTT</name>
+ <alias>vtt</alias>
+ <filename>*.vtt</filename>
+ <mime_type>text/vtt</mime_type>
+ </config>
+ <!--
+ The WebVTT spec refers to a WebVTT line terminator as either CRLF, CR or LF.
+ (https://www.w3.org/TR/webvtt1/#webvtt-line-terminator) However, with this
+ definition it is unclear whether CRLF is one line terminator (CRLF) or two
+ line terminators (CR and LF).
+
+ To work around this ambiguity, only CRLF and LF are considered as line terminators.
+ To my knowledge only classic Mac OS uses CR as line terminators, so the lexer should
+ still work for most files.
+ -->
+ <rules>
+ <!-- https://www.w3.org/TR/webvtt1/#webvtt-file-body -->
+ <state name="root">
+ <rule pattern="(\AWEBVTT)((?:[ \t][^\r\n]*)?(?:\r?\n){2,})">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Text" />
+ </bygroups>
+ </rule>
+ <rule pattern="(^REGION)([ \t]*$)">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Text" />
+ </bygroups>
+ <push state="region-settings-list" />
+ </rule>
+ <rule
+ pattern="(^STYLE)([ \t]*$)((?:(?!-->)[\s\S])*?)((?:\r?\n){2})">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Text" />
+ <using lexer="CSS" />
+ <token type="Text" />
+ </bygroups>
+ </rule>
+ <rule>
+ <include state="comment" />
+ </rule>
+ <rule
+ pattern="(?=((?![^\r\n]*-->)[^\r\n]*\r?\n)?(\d{2}:)?(?:[0-5][0-9]):(?:[0-5][0-9])\.\d{3}[ \t]+-->[ \t]+(\d{2}:)?(?:[0-5][0-9]):(?:[0-5][0-9])\.\d{3})"
+ >
+ <push state="cues" />
+ </rule>
+ </state>
+
+ <!-- https://www.w3.org/TR/webvtt1/#webvtt-region-settings-list -->
+ <state name="region-settings-list">
+ <rule pattern="(?: |\t|\r?\n(?!\r?\n))+">
+ <token type="Text" />
+ </rule>
+ <rule pattern="(?:\r?\n){2}">
+ <token type="Text" />
+ <pop depth="1" />
+ </rule>
+ <rule pattern="(id)(:)(?!-->)(\S+)">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="Literal" />
+ </bygroups>
+ </rule>
+ <rule pattern="(width)(:)((?:[1-9]?\d|100)(?:\.\d+)?)(%)">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="Literal" />
+ <token type="KeywordType" />
+ </bygroups>
+ </rule>
+ <rule pattern="(lines)(:)(\d+)">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="Literal" />
+ </bygroups>
+ </rule>
+ <rule
+ pattern="(regionanchor|viewportanchor)(:)((?:[1-9]?\d|100)(?:\.\d+)?)(%)(,)((?:[1-9]?\d|100)(?:\.\d+)?)(%)">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="Literal" />
+ <token type="KeywordType" />
+ <token type="Punctuation" />
+ <token type="Literal" />
+ <token type="KeywordType" />
+ </bygroups>
+ </rule>
+ <rule pattern="(scroll)(:)(up)">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="KeywordConstant" />
+ </bygroups>
+ </rule>
+ </state>
+
+ <!-- https://www.w3.org/TR/webvtt1/#webvtt-comment-block -->
+ <state name="comment">
+ <rule
+ pattern="^NOTE( |\t|\r?\n)((?!-->)[\s\S])*?(?:(\r?\n){2}|\Z)">
+ <token type="Comment" />
+ </rule>
+ </state>
+
+ <!--
+ "Zero or more WebVTT cue blocks and WebVTT comment blocks separated from each other by one or more
+ WebVTT line terminators." (https://www.w3.org/TR/webvtt1/#file-structure)
+ -->
+ <state name="cues">
+ <rule
+ pattern="(?:((?!-->)[^\r\n]+)?(\r?\n))?((?:\d{2}:)?(?:[0-5][0-9]):(?:[0-5][0-9])\.\d{3})([ \t]+)(-->)([ \t]+)((?:\d{2}:)?(?:[0-5][0-9]):(?:[0-5][0-9])\.\d{3})([ \t]*)">
+ <bygroups>
+ <token type="Name" />
+ <token type="Text" />
+ <token type="LiteralDate" />
+ <token type="Text" />
+ <token type="Operator" />
+ <token type="Text" />
+ <token type="LiteralDate" />
+ <token type="Text" />
+ </bygroups>
+ <push state="cue-settings-list" />
+ </rule>
+ <rule>
+ <include state="comment" />
+ </rule>
+ </state>
+
+ <!-- https://www.w3.org/TR/webvtt1/#webvtt-cue-settings-list -->
+ <state name="cue-settings-list">
+ <rule pattern="[ \t]+">
+ <token type="Text" />
+ </rule>
+ <rule pattern="(vertical)(:)?(rl|lr)?">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="KeywordConstant" />
+ </bygroups>
+ </rule>
+ <rule
+ pattern="(line)(:)?(?:(?:((?:[1-9]?\d|100)(?:\.\d+)?)(%)|(-?\d+))(?:(,)(start|center|end))?)?">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="Literal" />
+ <token type="KeywordType" />
+ <token type="Literal" />
+ <token type="Punctuation" />
+ <token type="KeywordConstant" />
+ </bygroups>
+ </rule>
+ <rule
+ pattern="(position)(:)?(?:(?:((?:[1-9]?\d|100)(?:\.\d+)?)(%)|(-?\d+))(?:(,)(line-left|center|line-right))?)?">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="Literal" />
+ <token type="KeywordType" />
+ <token type="Literal" />
+ <token type="Punctuation" />
+ <token type="KeywordConstant" />
+ </bygroups>
+ </rule>
+ <rule pattern="(size)(:)?(?:((?:[1-9]?\d|100)(?:\.\d+)?)(%))?">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="Literal" />
+ <token type="KeywordType" />
+ </bygroups>
+ </rule>
+ <rule pattern="(align)(:)?(start|center|end|left|right)?">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="KeywordConstant" />
+ </bygroups>
+ </rule>
+ <rule pattern="(region)(:)?((?![^\r\n]*-->(?=[ \t]+?))[^ \t\r\n]+)?">
+ <bygroups>
+ <token type="Keyword" />
+ <token type="Punctuation" />
+ <token type="Literal" />
+ </bygroups>
+ </rule>
+ <rule
+ pattern="(?=\r?\n)">
+ <push state="cue-payload" />
+ </rule>
+ </state>
+
+ <!-- https://www.w3.org/TR/webvtt1/#cue-payload -->
+ <state name="cue-payload">
+ <rule pattern="(\r?\n){2,}">
+ <token type="Text" />
+ <pop depth="2" />
+ </rule>
+ <rule pattern="[^<&]+?">
+ <token type="Text" />
+ </rule>
+ <rule pattern="&(#\d+|#x[0-9A-Fa-f]+|[a-zA-Z0-9]+);">
+ <token type="Text" />
+ </rule>
+ <rule pattern="(?=<)">
+ <token type="Text" />
+ <push state="cue-span-tag" />
+ </rule>
+ </state>
+ <state name="cue-span-tag">
+ <rule
+ pattern="<(?=c|i|b|u|ruby|rt|v|lang|(?:\d{2}:)?(?:[0-5][0-9]):(?:[0-5][0-9])\.\d{3})">
+ <token type="Punctuation" />
+ <push state="cue-span-start-tag-name" />
+ </rule>
+ <rule pattern="(</)(c|i|b|u|ruby|rt|v|lang)">
+ <bygroups>
+ <token type="Punctuation" />
+ <token type="NameTag" />
+ </bygroups>
+ </rule>
+ <rule pattern=">">
+ <token type="Punctuation" />
+ <pop depth="1" />
+ </rule>
+ </state>
+ <state name="cue-span-start-tag-name">
+ <rule pattern="(c|i|b|u|ruby|rt)|((?:\d{2}:)?(?:[0-5][0-9]):(?:[0-5][0-9])\.\d{3})">
+ <bygroups>
+ <token type="NameTag" />
+ <token type="LiteralDate" />
+ </bygroups>
+ <push state="cue-span-classes-without-annotations" />
+ </rule>
+ <rule pattern="v|lang">
+ <token type="NameTag" />
+ <push state="cue-span-classes-with-annotations" />
+ </rule>
+ </state>
+ <state name="cue-span-classes-without-annotations">
+ <rule>
+ <include state="cue-span-classes" />
+ </rule>
+ <rule pattern="(?=>)">
+ <pop depth="2" />
+ </rule>
+ </state>
+ <state name="cue-span-classes-with-annotations">
+ <rule>
+ <include state="cue-span-classes" />
+ </rule>
+ <rule pattern="(?=[ \t])">
+ <push state="cue-span-start-tag-annotations" />
+ </rule>
+ </state>
+ <state name="cue-span-classes">
+ <rule pattern="(\.)([^ \t\n\r&<>\.]+)">
+ <bygroups>
+ <token type="Punctuation" />
+ <token type="NameTag" />
+ </bygroups>
+ </rule>
+ </state>
+ <state name="cue-span-start-tag-annotations">
+ <rule
+ pattern="[ \t](?:[^\n\r&>]|&(?:#\d+|#x[0-9A-Fa-f]+|[a-zA-Z0-9]+);)+">
+ <token type="Text" />
+ </rule>
+ <rule pattern="(?=>)">
+ <token type="Text" />
+ <pop depth="3" />
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,57 @@
+<lexer>
+ <config>
+ <name>Whiley</name>
+ <alias>whiley</alias>
+ <filename>*.whiley</filename>
+ <mime_type>text/x-whiley</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\\\n">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="/[*](.|\n)*?[*]/">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="//.*?\n">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(function|import|from|method|property|type|with|variant)\b">
+ <token type="KeywordDeclaration"/>
+ </rule>
+ <rule pattern="(assert|assume|all|break|case|continue|debug|default|do|else|ensures|export|fail|final|for|if|in|is|native|no|new|private|protected|public|return|requires|skip|some|switch|unsafe|where|while)\b">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(true|false|null)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern="(bool|byte|int|void)\b">
+ <token type="KeywordType"/>
+ </rule>
+ <rule pattern="0b(?:_?[01])+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="0[xX][0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="(0|[1-9][0-9]*)">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="[a-zA-Z_]\w*">
+ <token type="Name"/>
+ </rule>
+ <rule pattern="[+%=><|^!?/\-*&~:]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="[{}()\[\],.;\|]">
+ <token type="Punctuation"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,95 @@
+<lexer>
+ <config>
+ <name>XML</name>
+ <alias>xml</alias>
+ <filename>*.xml</filename>
+ <filename>*.xsl</filename>
+ <filename>*.rss</filename>
+ <filename>*.xslt</filename>
+ <filename>*.xsd</filename>
+ <filename>*.wsdl</filename>
+ <filename>*.wsf</filename>
+ <filename>*.svg</filename>
+ <filename>*.csproj</filename>
+ <filename>*.vcxproj</filename>
+ <filename>*.fsproj</filename>
+ <mime_type>text/xml</mime_type>
+ <mime_type>application/xml</mime_type>
+ <mime_type>image/svg+xml</mime_type>
+ <mime_type>application/rss+xml</mime_type>
+ <mime_type>application/atom+xml</mime_type>
+ <dot_all>true</dot_all>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="[^<&]+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="&\S*?;">
+ <token type="NameEntity"/>
+ </rule>
+ <rule pattern="\<\!\[CDATA\[.*?\]\]\>">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="<!--">
+ <token type="Comment"/>
+ <push state="comment"/>
+ </rule>
+ <rule pattern="<\?.*?\?>">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="<![^>]*>">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="<\s*[\w:.-]+">
+ <token type="NameTag"/>
+ <push state="tag"/>
+ </rule>
+ <rule pattern="<\s*/\s*[\w:.-]+\s*>">
+ <token type="NameTag"/>
+ </rule>
+ </state>
+ <state name="comment">
+ <rule pattern="[^-]+">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="-->">
+ <token type="Comment"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="-">
+ <token type="Comment"/>
+ </rule>
+ </state>
+ <state name="tag">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="[\w.:-]+\s*=">
+ <token type="NameAttribute"/>
+ <push state="attr"/>
+ </rule>
+ <rule pattern="/?\s*>">
+ <token type="NameTag"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="attr">
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="".*?"">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="'.*?'">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[^\s>]+">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,35 @@
+<lexer>
+ <config>
+ <name>Xorg</name>
+ <alias>xorg.conf</alias>
+ <filename>xorg.conf</filename>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="((|Sub)Section)(\s+)("\w+")">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="LiteralStringEscape"/>
+ <token type="TextWhitespace"/>
+ <token type="LiteralStringEscape"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(End(|Sub)Section)">
+ <token type="KeywordNamespace"/>
+ </rule>
+ <rule pattern="(\w+)(\s+)([^\n#]+)">
+ <bygroups>
+ <token type="NameKeyword"/>
+ <token type="TextWhitespace"/>
+ <token type="LiteralString"/>
+ </bygroups>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,122 @@
+<lexer>
+ <config>
+ <name>YAML</name>
+ <alias>yaml</alias>
+ <filename>*.yaml</filename>
+ <filename>*.yml</filename>
+ <mime_type>text/x-yaml</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule>
+ <include state="whitespace"/>
+ </rule>
+ <rule pattern="^---">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="^\.\.\.">
+ <token type="NameNamespace"/>
+ </rule>
+ <rule pattern="[\n?]?\s*- ">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="#.*$">
+ <token type="Comment"/>
+ </rule>
+ <rule pattern="!![^\s]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="&[^\s]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\*[^\s]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="^%include\s+[^\n\r]+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule>
+ <include state="key"/>
+ </rule>
+ <rule>
+ <include state="value"/>
+ </rule>
+ <rule pattern="[?:,\[\]]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern=".">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="value">
+ <rule pattern="([>|](?:[+-])?)(\n(^ {1,})(?:(?:.*\n*(?:^\3 *).*)+|.*))">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="LiteralStringDoc"/>
+ <token type="Ignore"/>
+ </bygroups>
+ </rule>
+ <rule pattern="(false|False|FALSE|true|True|TRUE|null|Off|off|yes|Yes|YES|OFF|On|ON|no|No|on|NO|n|N|Y|y)\b">
+ <token type="KeywordConstant"/>
+ </rule>
+ <rule pattern=""(?:\\.|[^"])*"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(?:\\.|[^'])*'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?">
+ <token type="LiteralDate"/>
+ </rule>
+ <rule pattern="\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b">
+ <token type="LiteralNumber"/>
+ </rule>
+ <rule pattern="([^\{\}\[\]\?,\:\!\-\*&\@].*)( )+(#.*)">
+ <bygroups>
+ <token type="Literal"/>
+ <token type="TextWhitespace"/>
+ <token type="Comment"/>
+ </bygroups>
+ </rule>
+ <rule pattern="[^\{\}\[\]\?,\:\!\-\*&\@].*">
+ <token type="Literal"/>
+ </rule>
+ </state>
+ <state name="key">
+ <rule pattern=""[^"\n].*": ">
+ <token type="NameTag"/>
+ </rule>
+ <rule pattern="(-)( )([^"\n{]*)(:)( )">
+ <bygroups>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ <token type="NameTag"/>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([^"\n{]*)(:)( )">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([^"\n{]*)(:)(\n)">
+ <bygroups>
+ <token type="NameTag"/>
+ <token type="Punctuation"/>
+ <token type="TextWhitespace"/>
+ </bygroups>
+ </rule>
+ </state>
+ <state name="whitespace">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="\n+">
+ <token type="TextWhitespace"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,99 @@
+<lexer>
+ <config>
+ <name>YANG</name>
+ <alias>yang</alias>
+ <filename>*.yang</filename>
+ <mime_type>application/yang</mime_type>
+ </config>
+ <rules>
+ <state name="root">
+ <rule pattern="\s+">
+ <token type="TextWhitespace"/>
+ </rule>
+ <rule pattern="[\{\}\;]+">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="(?<![\-\w])(and|or|not|\+|\.)(?![\-\w])">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern=""(?:\\"|[^"])*?"">
+ <token type="LiteralStringDouble"/>
+ </rule>
+ <rule pattern="'(?:\\'|[^'])*?'">
+ <token type="LiteralStringSingle"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+ <push state="comments"/>
+ </rule>
+ <rule pattern="//.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="(?:^|(?<=[\s{};]))([\w.-]+)(:)([\w.-]+)(?=[\s{};])">
+ <bygroups>
+ <token type="KeywordNamespace"/>
+ <token type="Punctuation"/>
+ <token type="Text"/>
+ </bygroups>
+ </rule>
+ <rule pattern="([0-9]{4}\-[0-9]{2}\-[0-9]{2})(?=[\s\{\}\;])">
+ <token type="LiteralDate"/>
+ </rule>
+ <rule pattern="([0-9]+\.[0-9]+)(?=[\s\{\}\;])">
+ <token type="LiteralNumberFloat"/>
+ </rule>
+ <rule pattern="([0-9]+)(?=[\s\{\}\;])">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern="(submodule|module)(?=[^\w\-\:])">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(yang-version|belongs-to|namespace|prefix)(?=[^\w\-\:])">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(organization|description|reference|revision|contact)(?=[^\w\-\:])">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(revision-date|include|import)(?=[^\w\-\:])">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(notification|if-feature|deviation|extension|identity|argument|grouping|typedef|feature|augment|output|action|input|rpc)(?=[^\w\-\:])">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(leaf-list|container|presence|anydata|deviate|choice|config|anyxml|refine|leaf|must|list|case|uses|when)(?=[^\w\-\:])">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(require-instance|fraction-digits|error-app-tag|error-message|min-elements|max-elements|yin-element|ordered-by|position|modifier|default|pattern|length|status|units|value|range|type|path|enum|base|bit)(?=[^\w\-\:])">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(mandatory|unique|key)(?=[^\w\-\:])">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(not-supported|invert-match|deprecated|unbounded|obsolete|current|replace|delete|false|true|user|min|max|add)(?=[^\w\-\:])">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="(instance-identifier|identityref|enumeration|decimal64|boolean|leafref|uint64|uint32|string|binary|uint16|int32|int64|int16|empty|uint8|union|int8|bits)(?=[^\w\-\:])">
+ <token type="NameClass"/>
+ </rule>
+ <rule pattern="[^;{}\s\'\"]+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ <state name="comments">
+ <rule pattern="[^*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ <rule pattern="/\*">
+ <token type="CommentMultiline"/>
+        <push state="comments"/>
+ </rule>
+ <rule pattern="\*/">
+ <token type="CommentMultiline"/>
+ <pop depth="1"/>
+ </rule>
+ <rule pattern="[*/]">
+ <token type="CommentMultiline"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,74 @@
+<lexer>
+ <config>
+ <name>Z80 Assembly</name>
+ <alias>z80</alias>
+ <filename>*.z80</filename>
+ <filename>*.asm</filename>
+ <case_insensitive>true</case_insensitive>
+ </config>
+ <rules>
+ <state name="string">
+ <rule pattern="[^"\\]+">
+ <token type="LiteralString"/>
+ </rule>
+ <rule pattern="\\.">
+ <token type="LiteralStringEscape"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <pop depth="1"/>
+ </rule>
+ </state>
+ <state name="root">
+ <rule pattern=";.*?$">
+ <token type="CommentSingle"/>
+ </rule>
+ <rule pattern="^[.\w]+:">
+ <token type="NameLabel"/>
+ </rule>
+ <rule pattern="((0x)|\$)[0-9a-fA-F]+">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="[0-9][0-9a-fA-F]+h">
+ <token type="LiteralNumberHex"/>
+ </rule>
+ <rule pattern="((0b)|%)[01]+">
+ <token type="LiteralNumberBin"/>
+ </rule>
+ <rule pattern="-?[0-9]+">
+ <token type="LiteralNumberInteger"/>
+ </rule>
+ <rule pattern=""">
+ <token type="LiteralString"/>
+ <push state="string"/>
+ </rule>
+ <rule pattern="'\\?.'">
+ <token type="LiteralStringChar"/>
+ </rule>
+ <rule pattern="[,=()\\]">
+ <token type="Punctuation"/>
+ </rule>
+ <rule pattern="^\s*#\w+">
+ <token type="CommentPreproc"/>
+ </rule>
+ <rule pattern="\.(db|dw|end|org|byte|word|fill|block|addinstr|echo|error|list|nolist|equ|show|option|seek)">
+ <token type="NameBuiltin"/>
+ </rule>
+ <rule pattern="(ex|exx|ld|ldd|lddr|ldi|ldir|pop|push|adc|add|cp|cpd|cpdr|cpi|cpir|cpl|daa|dec|inc|neg|sbc|sub|and|bit|ccf|or|res|scf|set|xor|rl|rla|rlc|rlca|rld|rr|rra|rrc|rrca|rrd|sla|sra|srl|call|djnz|jp|jr|ret|rst|nop|reti|retn|di|ei|halt|im|in|ind|indr|ini|inir|out|outd|otdr|outi|otir)">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="(z|nz|c|nc|po|pe|p|m)">
+ <token type="Keyword"/>
+ </rule>
+ <rule pattern="[+-/*~\^&|]">
+ <token type="Operator"/>
+ </rule>
+ <rule pattern="\w+">
+ <token type="Text"/>
+ </rule>
+ <rule pattern="\s+">
+ <token type="Text"/>
+ </rule>
+ </state>
+ </rules>
+</lexer>
@@ -0,0 +1,51 @@
+<!-- Zed lexer definition (Chroma XML rule format) for *.zed schema files.
+     The definition/relation/permission keywords suggest this is the
+     authorization-schema language of that name; verify against upstream. -->
+<lexer>
+  <config>
+    <name>Zed</name>
+    <alias>zed</alias>
+    <filename>*.zed</filename>
+    <mime_type>text/zed</mime_type>
+  </config>
+  <rules>
+    <state name="root">
+      <rule pattern="\n">
+        <token type="TextWhitespace"/>
+      </rule>
+      <rule pattern="\s+">
+        <token type="TextWhitespace"/>
+      </rule>
+      <!-- Line and block comments (second block rule catches an unterminated one). -->
+      <rule pattern="//.*?\n">
+        <token type="CommentSingle"/>
+      </rule>
+      <rule pattern="/(\\\n)?[*][\w\W]*?[*](\\\n)?/">
+        <token type="CommentMultiline"/>
+      </rule>
+      <rule pattern="/(\\\n)?[*][\w\W]*">
+        <token type="CommentMultiline"/>
+      </rule>
+      <rule pattern="(definition)\b">
+        <token type="KeywordType"/>
+      </rule>
+      <rule pattern="(relation)\b">
+        <token type="KeywordNamespace"/>
+      </rule>
+      <rule pattern="(permission)\b">
+        <token type="KeywordDeclaration"/>
+      </rule>
+      <!-- An identifier followed by "/" is a namespace prefix. -->
+      <rule pattern="[a-zA-Z_]\w*/">
+        <token type="NameNamespace"/>
+      </rule>
+      <rule pattern="[a-zA-Z_]\w*">
+        <token type="Name"/>
+      </rule>
+      <rule pattern="#[a-zA-Z_]\w*">
+        <token type="NameVariable"/>
+      </rule>
+      <rule pattern="[+%=><|^!?/\-*&~:]">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="[{}()\[\],.;]">
+        <token type="Punctuation"/>
+      </rule>
+    </state>
+  </rules>
+</lexer>
@@ -0,0 +1,112 @@
+<!-- Zig lexer definition (Chroma XML rule format) for *.zig files. -->
+<lexer>
+  <config>
+    <name>Zig</name>
+    <alias>zig</alias>
+    <filename>*.zig</filename>
+    <mime_type>text/zig</mime_type>
+  </config>
+  <rules>
+    <!-- Inside a double-quoted string: escapes first, then plain runs, closing quote pops. -->
+    <state name="string">
+      <rule pattern="\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])">
+        <token type="LiteralStringEscape"/>
+      </rule>
+      <rule pattern="[^\\"\n]+">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern=""">
+        <token type="LiteralString"/>
+        <pop depth="1"/>
+      </rule>
+    </state>
+    <state name="root">
+      <rule pattern="\n">
+        <token type="TextWhitespace"/>
+      </rule>
+      <rule pattern="\s+">
+        <token type="TextWhitespace"/>
+      </rule>
+      <rule pattern="//.*?\n">
+        <token type="CommentSingle"/>
+      </rule>
+      <!-- Keyword groups: control flow, reserved modifiers, type declarations. -->
+      <rule pattern="(unreachable|continue|errdefer|suspend|return|resume|cancel|break|catch|async|await|defer|asm|try)\b">
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="(threadlocal|linksection|allowzero|stdcallcc|volatile|comptime|noalias|nakedcc|inline|export|packed|extern|align|const|pub|var)\b">
+        <token type="KeywordReserved"/>
+      </rule>
+      <rule pattern="(struct|union|error|enum)\b">
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="(while|for)\b">
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="(comptime_float|comptime_int|c_longdouble|c_ulonglong|c_longlong|c_voidi8|noreturn|c_ushort|anyerror|promise|c_short|c_ulong|c_uint|c_long|isize|c_int|usize|void|f128|i128|type|bool|u128|u16|f64|f32|u64|i16|f16|i32|u32|i64|u8|i0|u0)\b">
+        <token type="KeywordType"/>
+      </rule>
+      <rule pattern="(undefined|false|true|null)\b">
+        <token type="KeywordConstant"/>
+      </rule>
+      <rule pattern="(switch|orelse|else|and|if|or)\b">
+        <token type="Keyword"/>
+      </rule>
+      <rule pattern="(usingnamespace|test|fn)\b">
+        <token type="Keyword"/>
+      </rule>
+      <!-- Numeric literals: hex floats, decimal floats, then 0b/0o/0x and decimal
+           integers (all allowing underscore digit separators). -->
+      <rule pattern="0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?">
+        <token type="LiteralNumberFloat"/>
+      </rule>
+      <rule pattern="0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+">
+        <token type="LiteralNumberFloat"/>
+      </rule>
+      <rule pattern="[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?">
+        <token type="LiteralNumberFloat"/>
+      </rule>
+      <rule pattern="[0-9]+\.?[eE][-+]?[0-9]+">
+        <token type="LiteralNumberFloat"/>
+      </rule>
+      <rule pattern="0b(?:_?[01])+">
+        <token type="LiteralNumberBin"/>
+      </rule>
+      <rule pattern="0o(?:_?[0-7])+">
+        <token type="LiteralNumberOct"/>
+      </rule>
+      <rule pattern="0x(?:_?[0-9a-fA-F])+">
+        <token type="LiteralNumberHex"/>
+      </rule>
+      <rule pattern="(?:_?[0-9])+">
+        <token type="LiteralNumberInteger"/>
+      </rule>
+      <rule pattern="@[a-zA-Z_]\w*">
+        <token type="NameBuiltin"/>
+      </rule>
+      <rule pattern="[a-zA-Z_]\w*">
+        <token type="Name"/>
+      </rule>
+      <!-- Character literals, then \\ multiline-string lines. -->
+      <rule pattern="\'\\\'\'">
+        <token type="LiteralStringEscape"/>
+      </rule>
+      <rule pattern="\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'">
+        <token type="LiteralStringEscape"/>
+      </rule>
+      <rule pattern="\'[^\\\']\'">
+        <token type="LiteralString"/>
+      </rule>
+      <rule pattern="\\\\[^\n]*">
+        <token type="LiteralStringHeredoc"/>
+      </rule>
+      <rule pattern="c\\\\[^\n]*">
+        <token type="LiteralStringHeredoc"/>
+      </rule>
+      <rule pattern="c?"">
+        <token type="LiteralString"/>
+        <push state="string"/>
+      </rule>
+      <rule pattern="[+%=><|^!?/\-*&~:]">
+        <token type="Operator"/>
+      </rule>
+      <rule pattern="[{}()\[\],.;]">
+        <token type="Punctuation"/>
+      </rule>
+    </state>
+  </rules>
+</lexer>
@@ -0,0 +1,118 @@
+package lexers
+
+import (
+ . "github.com/alecthomas/chroma/v2" // nolint
+)
+
+// GenshiText is the lexer for Genshi text templates. It registers no
+// filename patterns, so it is only selected by alias or MIME type.
+var GenshiText = Register(MustNewLexer(
+	&Config{
+		Name:      "Genshi Text",
+		Aliases:   []string{"genshitext"},
+		Filenames: []string{},
+		MimeTypes: []string{"application/x-genshi-text", "text/x-genshi"},
+	},
+	genshiTextRules,
+))
+
+// genshiTextRules defines the token rules for Genshi text templates:
+// "##" line comments, "#"-prefixed directives (def/for/if/choose/...)
+// whose expressions are delegated to the Python lexer, and "$var" /
+// "${expr}" substitutions handled by the shared "variable" state.
+func genshiTextRules() Rules {
+	return Rules{
+		"root": {
+			{`[^#$\s]+`, Other, nil},
+			{`^(\s*)(##.*)$`, ByGroups(Text, Comment), nil},
+			{`^(\s*)(#)`, ByGroups(Text, CommentPreproc), Push("directive")},
+			Include("variable"),
+			{`[#$\s]`, Other, nil},
+		},
+		"directive": {
+			{`\n`, Text, Pop(1)},
+			{`(?:def|for|if)\s+.*`, Using("Python"), Pop(1)},
+			{`(choose|when|with)([^\S\n]+)(.*)`, ByGroups(Keyword, Text, Using("Python")), Pop(1)},
+			{`(choose|otherwise)\b`, Keyword, Pop(1)},
+			{`(end\w*)([^\S\n]*)(.*)`, ByGroups(Keyword, Text, Comment), Pop(1)},
+		},
+		"variable": {
+			// (?<!\$) lookbehind: a doubled "$$" is not a substitution.
+			{`(?<!\$)(\$\{)(.+?)(\})`, ByGroups(CommentPreproc, Using("Python"), CommentPreproc), nil},
+			{`(?<!\$)(\$)([a-zA-Z_][\w.]*)`, NameVariable, nil},
+		},
+	}
+}
+
+// GenshiHTMLTemplate is the lexer for Genshi HTML templates (alias
+// "html+genshi"/"html+kid"). It shares genshiMarkupRules with Genshi
+// and differs only in configuration.
+var GenshiHTMLTemplate = Register(MustNewLexer(
+	&Config{
+		Name:         "Genshi HTML",
+		Aliases:      []string{"html+genshi", "html+kid"},
+		Filenames:    []string{},
+		MimeTypes:    []string{"text/html+genshi"},
+		NotMultiline: true,
+		DotAll:       true,
+	},
+	genshiMarkupRules,
+))
+
+// Genshi is the lexer for Genshi/Kid XML templates (*.kid files).
+// It shares genshiMarkupRules with GenshiHTMLTemplate.
+var Genshi = Register(MustNewLexer(
+	&Config{
+		Name:         "Genshi",
+		Aliases:      []string{"genshi", "kid", "xml+genshi", "xml+kid"},
+		Filenames:    []string{"*.kid"},
+		MimeTypes:    []string{"application/x-genshi", "application/x-kid"},
+		NotMultiline: true,
+		DotAll:       true,
+	},
+	genshiMarkupRules,
+))
+
+// genshiMarkupRules defines the token rules shared by the Genshi XML and
+// HTML template lexers: <?python ?> blocks and ${...}/$var substitutions
+// are delegated to the Python lexer; py:* tags and attributes get their
+// own states so their values are also lexed as Python.
+func genshiMarkupRules() Rules {
+	return Rules{
+		"root": {
+			{`[^<$]+`, Other, nil},
+			{`(<\?python)(.*?)(\?>)`, ByGroups(CommentPreproc, Using("Python"), CommentPreproc), nil},
+			// script/style bodies are passed through untokenized.
+			{`<\s*(script|style)\s*.*?>.*?<\s*/\1\s*>`, Other, nil},
+			{`<\s*py:[a-zA-Z0-9]+`, NameTag, Push("pytag")},
+			{`<\s*[a-zA-Z0-9:.]+`, NameTag, Push("tag")},
+			Include("variable"),
+			{`[<$]`, Other, nil},
+		},
+		"pytag": {
+			{`\s+`, Text, nil},
+			{`[\w:-]+\s*=`, NameAttribute, Push("pyattr")},
+			{`/?\s*>`, NameTag, Pop(1)},
+		},
+		"pyattr": {
+			{`(")(.*?)(")`, ByGroups(LiteralString, Using("Python"), LiteralString), Pop(1)},
+			{`(')(.*?)(')`, ByGroups(LiteralString, Using("Python"), LiteralString), Pop(1)},
+			{`[^\s>]+`, LiteralString, Pop(1)},
+		},
+		"tag": {
+			{`\s+`, Text, nil},
+			{`py:[\w-]+\s*=`, NameAttribute, Push("pyattr")},
+			{`[\w:-]+\s*=`, NameAttribute, Push("attr")},
+			{`/?\s*>`, NameTag, Pop(1)},
+		},
+		"attr": {
+			{`"`, LiteralString, Push("attr-dstring")},
+			{`'`, LiteralString, Push("attr-sstring")},
+			{`[^\s>]*`, LiteralString, Pop(1)},
+		},
+		"attr-dstring": {
+			{`"`, LiteralString, Pop(1)},
+			Include("strings"),
+			{`'`, LiteralString, nil},
+		},
+		"attr-sstring": {
+			{`'`, LiteralString, Pop(1)},
+			Include("strings"),
+			{`'`, LiteralString, nil},
+		},
+		"strings": {
+			{`[^"'$]+`, LiteralString, nil},
+			Include("variable"),
+		},
+		"variable": {
+			// (?<!\$) lookbehind: a doubled "$$" is not a substitution.
+			{`(?<!\$)(\$\{)(.+?)(\})`, ByGroups(CommentPreproc, Using("Python"), CommentPreproc), nil},
+			{`(?<!\$)(\$)([a-zA-Z_][\w\.]*)`, NameVariable, nil},
+		},
+	}
+}
@@ -0,0 +1,81 @@
+package lexers
+
+import (
+ "strings"
+
+ . "github.com/alecthomas/chroma/v2" // nolint
+)
+
+// Go is the lexer for Go source files (*.go). The analyser scores the
+// input so content detection can prefer this lexer for Go-looking text.
+var Go = Register(MustNewLexer(
+	&Config{
+		Name:      "Go",
+		Aliases:   []string{"go", "golang"},
+		Filenames: []string{"*.go"},
+		MimeTypes: []string{"text/x-gosrc"},
+	},
+	goRules,
+).SetAnalyser(func(text string) float32 {
+	// A package clause alone is weak evidence; combined with a reference
+	// to the fmt package it is a much stronger signal.
+	hasPackage := strings.Contains(text, "package ")
+	switch {
+	case hasPackage && strings.Contains(text, "fmt."):
+		return 0.5
+	case hasPackage:
+		return 0.1
+	default:
+		return 0.0
+	}
+}))
+
+// goRules defines the token rules for the Go lexer. Rule order matters:
+// built-in-call names (followed by "(") are tried before the plain
+// keyword-type rule, and float forms before integer forms.
+func goRules() Rules {
+	return Rules{
+		"root": {
+			{`\n`, Text, nil},
+			{`\s+`, Text, nil},
+			{`\\\n`, Text, nil},
+			{`//[^\n\r]*`, CommentSingle, nil},
+			{`/(\\\n)?[*](.|\n)*?[*](\\\n)?/`, CommentMultiline, nil},
+			{`(import|package)\b`, KeywordNamespace, nil},
+			{`(var|func|struct|map|chan|type|interface|const)\b`, KeywordDeclaration, nil},
+			{Words(``, `\b`, `break`, `default`, `select`, `case`, `defer`, `go`, `else`, `goto`, `switch`, `fallthrough`, `if`, `range`, `continue`, `for`, `return`), Keyword, nil},
+			{`(true|false|iota|nil)\b`, KeywordConstant, nil},
+			// Predeclared identifiers immediately followed by "(" are built-in calls...
+			{Words(``, `\b(\()`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `int`, `int8`, `int16`, `int32`, `int64`, `float`, `float32`, `float64`, `complex64`, `complex128`, `byte`, `rune`, `string`, `bool`, `error`, `uintptr`, `print`, `println`, `panic`, `recover`, `close`, `complex`, `real`, `imag`, `len`, `cap`, `append`, `copy`, `delete`, `new`, `make`, `clear`, `min`, `max`), ByGroups(NameBuiltin, Punctuation), nil},
+			// ...otherwise they are type names.
+			{Words(``, `\b`, `uint`, `uint8`, `uint16`, `uint32`, `uint64`, `int`, `int8`, `int16`, `int32`, `int64`, `float`, `float32`, `float64`, `complex64`, `complex128`, `byte`, `rune`, `string`, `bool`, `error`, `uintptr`, `any`), KeywordType, nil},
+			// Imaginary and float literals before plain integers.
+			{`\d+i`, LiteralNumber, nil},
+			{`\d+\.\d*([Ee][-+]\d+)?i`, LiteralNumber, nil},
+			{`\.\d+([Ee][-+]\d+)?i`, LiteralNumber, nil},
+			{`\d+[Ee][-+]\d+i`, LiteralNumber, nil},
+			{`\d+(\.\d+[eE][+\-]?\d+|\.\d*|[eE][+\-]?\d+)`, LiteralNumberFloat, nil},
+			{`\.\d+([eE][+\-]?\d+)?`, LiteralNumberFloat, nil},
+			{`0[0-7]+`, LiteralNumberOct, nil},
+			{`0[xX][0-9a-fA-F_]+`, LiteralNumberHex, nil},
+			{`0b[01_]+`, LiteralNumberBin, nil},
+			{`(0|[1-9][0-9_]*)`, LiteralNumberInteger, nil},
+			{`'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'`, LiteralStringChar, nil},
+			// Raw strings are delegated to the Go template lexer with its
+			// Other tokens remapped to LiteralString.
+			{"(`)([^`]*)(`)", ByGroups(LiteralString, UsingLexer(TypeRemappingLexer(GoTextTemplate, TypeMapping{{Other, LiteralString, nil}})), LiteralString), nil},
+			{`"(\\\\|\\"|[^"])*"`, LiteralString, nil},
+			{`(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\||<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])`, Operator, nil},
+			{`([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(NameFunction, UsingSelf("root"), Punctuation), nil},
+			{`[|^<>=!()\[\]{}.,;:~]`, Punctuation, nil},
+			{`[^\W\d]\w*`, NameOther, nil},
+		},
+	}
+}
+
+// GoHTMLTemplate is the lexer for Go html/template sources: the embedded
+// go_template.xml rules handle {{...}} actions while everything else is
+// delegated to the HTML lexer.
+var GoHTMLTemplate = Register(DelegatingLexer(HTML, MustNewXMLLexer(
+	embedded,
+	"embedded/go_template.xml",
+).SetConfig(
+	&Config{
+		Name:    "Go HTML Template",
+		Aliases: []string{"go-html-template"},
+	},
+)))
+
+// GoTextTemplate is the lexer for Go text/template sources, loaded from
+// the embedded go_template.xml definition. It is also reused by goRules
+// to lex the contents of raw (backquoted) string literals.
+var GoTextTemplate = Register(MustNewXMLLexer(
+	embedded,
+	"embedded/go_template.xml",
+).SetConfig(
+	&Config{
+		Name:    "Go Text Template",
+		Aliases: []string{"go-text-template"},
+	},
+))
@@ -0,0 +1,647 @@
+package lexers
+
+import (
+ . "github.com/alecthomas/chroma/v2" // nolint
+)
+
+// Haxe is the lexer for Haxe source files (*.hx, *.hxsl). DotAll is set
+// because several rules rely on "." matching newlines.
+var Haxe = Register(MustNewLexer(
+	&Config{
+		Name:      "Haxe",
+		Aliases:   []string{"hx", "haxe", "hxsl"},
+		Filenames: []string{"*.hx", "*.hxsl"},
+		MimeTypes: []string{"text/haxe", "text/x-haxe", "text/x-hx"},
+		DotAll:    true,
+	},
+	haxeRules,
+))
+
+// haxeRules defines the token rules for the Haxe lexer. The grammar is
+// modelled as a pushdown automaton: most states push a chain of successor
+// states (often replacing themselves via "#pop") instead of looping, and
+// Default(...) entries fire when no pattern matches. Two long regex
+// fragments recur throughout:
+//   - `(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)` is the Haxe
+//     identifier pattern;
+//   - the `(?!(?:function|class|...)\b)` prefix is a negative lookahead
+//     that stops keywords from matching as identifiers.
+func haxeRules() Rules {
+	return Rules{
+		// Top level: package/import/using, type declarations, else an
+		// expression statement.
+		"root": {
+			Include("spaces"),
+			Include("meta"),
+			{`(?:package)\b`, KeywordNamespace, Push("semicolon", "package")},
+			{`(?:import)\b`, KeywordNamespace, Push("semicolon", "import")},
+			{`(?:using)\b`, KeywordNamespace, Push("semicolon", "using")},
+			{`(?:extern|private)\b`, KeywordDeclaration, nil},
+			{`(?:abstract)\b`, KeywordDeclaration, Push("abstract")},
+			{`(?:class|interface)\b`, KeywordDeclaration, Push("class")},
+			{`(?:enum)\b`, KeywordDeclaration, Push("enum")},
+			{`(?:typedef)\b`, KeywordDeclaration, Push("typedef")},
+			{`(?=.)`, Text, Push("expr-statement")},
+		},
+		// Whitespace, comments, and #if/#else/#end conditional directives
+		// (handled statefully by haxePreProcMutator).
+		"spaces": {
+			{`\s+`, Text, nil},
+			{`//[^\n\r]*`, CommentSingle, nil},
+			{`/\*.*?\*/`, CommentMultiline, nil},
+			{`(#)(if|elseif|else|end|error)\b`, CommentPreproc, MutatorFunc(haxePreProcMutator)},
+		},
+		// String states: single-quoted strings support ${...} and $ident
+		// interpolation; double-quoted strings do not.
+		"string-single-interpol": {
+			{`\$\{`, LiteralStringInterpol, Push("string-interpol-close", "expr")},
+			{`\$\$`, LiteralStringEscape, nil},
+			{`\$(?=(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))`, LiteralStringInterpol, Push("ident")},
+			Include("string-single"),
+		},
+		"string-single": {
+			{`'`, LiteralStringSingle, Pop(1)},
+			{`\\.`, LiteralStringEscape, nil},
+			{`.`, LiteralStringSingle, nil},
+		},
+		"string-double": {
+			{`"`, LiteralStringDouble, Pop(1)},
+			{`\\.`, LiteralStringEscape, nil},
+			{`.`, LiteralStringDouble, nil},
+		},
+		"string-interpol-close": {
+			{`\$(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, LiteralStringInterpol, nil},
+			{`\}`, LiteralStringInterpol, Pop(1)},
+		},
+		// Dotted paths after package/import/using.
+		"package": {
+			Include("spaces"),
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil},
+			{`\.`, Punctuation, Push("import-ident")},
+			Default(Pop(1)),
+		},
+		"import": {
+			Include("spaces"),
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil},
+			{`\*`, Keyword, nil},
+			{`\.`, Punctuation, Push("import-ident")},
+			{`in`, KeywordNamespace, Push("ident")},
+			Default(Pop(1)),
+		},
+		"import-ident": {
+			Include("spaces"),
+			{`\*`, Keyword, Pop(1)},
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, Pop(1)},
+		},
+		"using": {
+			Include("spaces"),
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameNamespace, nil},
+			{`\.`, Punctuation, Push("import-ident")},
+			Default(Pop(1)),
+		},
+		// Conditional-compilation expression states pushed by the
+		// preprocessor mutator.
+		"preproc-error": {
+			{`\s+`, CommentPreproc, nil},
+			{`'`, LiteralStringSingle, Push("#pop", "string-single")},
+			{`"`, LiteralStringDouble, Push("#pop", "string-double")},
+			Default(Pop(1)),
+		},
+		"preproc-expr": {
+			{`\s+`, CommentPreproc, nil},
+			{`\!`, CommentPreproc, nil},
+			{`\(`, CommentPreproc, Push("#pop", "preproc-parenthesis")},
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, CommentPreproc, Pop(1)},
+			{`\.[0-9]+`, LiteralNumberFloat, nil},
+			{`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, nil},
+			{`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, nil},
+			{`[0-9]+\.[0-9]+`, LiteralNumberFloat, nil},
+			{`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, nil},
+			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
+			{`[0-9]+`, LiteralNumberInteger, nil},
+			{`'`, LiteralStringSingle, Push("#pop", "string-single")},
+			{`"`, LiteralStringDouble, Push("#pop", "string-double")},
+		},
+		"preproc-parenthesis": {
+			{`\s+`, CommentPreproc, nil},
+			{`\)`, CommentPreproc, Pop(1)},
+			Default(Push("preproc-expr-in-parenthesis")),
+		},
+		"preproc-expr-chain": {
+			{`\s+`, CommentPreproc, nil},
+			{`(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|/|\-|=>|=)`, CommentPreproc, Push("#pop", "preproc-expr-in-parenthesis")},
+			Default(Pop(1)),
+		},
+		"preproc-expr-in-parenthesis": {
+			{`\s+`, CommentPreproc, nil},
+			{`\!`, CommentPreproc, nil},
+			{`\(`, CommentPreproc, Push("#pop", "preproc-expr-chain", "preproc-parenthesis")},
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, CommentPreproc, Push("#pop", "preproc-expr-chain")},
+			{`\.[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")},
+			{`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")},
+			{`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")},
+			{`[0-9]+\.[0-9]+`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")},
+			{`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Push("#pop", "preproc-expr-chain")},
+			{`0x[0-9a-fA-F]+`, LiteralNumberHex, Push("#pop", "preproc-expr-chain")},
+			{`[0-9]+`, LiteralNumberInteger, Push("#pop", "preproc-expr-chain")},
+			{`'`, LiteralStringSingle, Push("#pop", "preproc-expr-chain", "string-single")},
+			{`"`, LiteralStringDouble, Push("#pop", "preproc-expr-chain", "string-double")},
+		},
+		// Type declarations: abstract, typedef, enum, class/interface.
+		"abstract": {
+			Include("spaces"),
+			Default(Pop(1), Push("abstract-body"), Push("abstract-relation"), Push("abstract-opaque"), Push("type-param-constraint"), Push("type-name")),
+		},
+		"abstract-body": {
+			Include("spaces"),
+			{`\{`, Punctuation, Push("#pop", "class-body")},
+		},
+		"abstract-opaque": {
+			Include("spaces"),
+			{`\(`, Punctuation, Push("#pop", "parenthesis-close", "type")},
+			Default(Pop(1)),
+		},
+		"abstract-relation": {
+			Include("spaces"),
+			{`(?:to|from)`, KeywordDeclaration, Push("type")},
+			{`,`, Punctuation, nil},
+			Default(Pop(1)),
+		},
+		// @metadata annotations, optionally with a call-style argument list.
+		"meta": {
+			Include("spaces"),
+			{`@`, NameDecorator, Push("meta-body", "meta-ident", "meta-colon")},
+		},
+		"meta-colon": {
+			Include("spaces"),
+			{`:`, NameDecorator, Pop(1)},
+			Default(Pop(1)),
+		},
+		"meta-ident": {
+			Include("spaces"),
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameDecorator, Pop(1)},
+		},
+		"meta-body": {
+			Include("spaces"),
+			{`\(`, NameDecorator, Push("#pop", "meta-call")},
+			Default(Pop(1)),
+		},
+		"meta-call": {
+			Include("spaces"),
+			{`\)`, NameDecorator, Pop(1)},
+			Default(Pop(1), Push("meta-call-sep"), Push("expr")),
+		},
+		"meta-call-sep": {
+			Include("spaces"),
+			{`\)`, NameDecorator, Pop(1)},
+			{`,`, Punctuation, Push("#pop", "meta-call")},
+		},
+		"typedef": {
+			Include("spaces"),
+			Default(Pop(1), Push("typedef-body"), Push("type-param-constraint"), Push("type-name")),
+		},
+		"typedef-body": {
+			Include("spaces"),
+			{`=`, Operator, Push("#pop", "optional-semicolon", "type")},
+		},
+		"enum": {
+			Include("spaces"),
+			Default(Pop(1), Push("enum-body"), Push("bracket-open"), Push("type-param-constraint"), Push("type-name")),
+		},
+		"enum-body": {
+			Include("spaces"),
+			Include("meta"),
+			{`\}`, Punctuation, Pop(1)},
+			{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("enum-member", "type-param-constraint")},
+		},
+		"enum-member": {
+			Include("spaces"),
+			{`\(`, Punctuation, Push("#pop", "semicolon", "flag", "function-param")},
+			Default(Pop(1), Push("semicolon"), Push("flag")),
+		},
+		"class": {
+			Include("spaces"),
+			Default(Pop(1), Push("class-body"), Push("bracket-open"), Push("extends"), Push("type-param-constraint"), Push("type-name")),
+		},
+		"extends": {
+			Include("spaces"),
+			{`(?:extends|implements)\b`, KeywordDeclaration, Push("type")},
+			{`,`, Punctuation, nil},
+			Default(Pop(1)),
+		},
+		"bracket-open": {
+			Include("spaces"),
+			{`\{`, Punctuation, Pop(1)},
+		},
+		"bracket-close": {
+			Include("spaces"),
+			{`\}`, Punctuation, Pop(1)},
+		},
+		"class-body": {
+			Include("spaces"),
+			Include("meta"),
+			{`\}`, Punctuation, Pop(1)},
+			{`(?:static|public|private|override|dynamic|inline|macro)\b`, KeywordDeclaration, nil},
+			Default(Push("class-member")),
+		},
+		"class-member": {
+			Include("spaces"),
+			{`(var)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "var")},
+			{`(function)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "class-method")},
+		},
+		// Functions: named or anonymous locals, methods, parameter lists.
+		"function-local": {
+			Include("spaces"),
+			{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameFunction, Push("#pop", "optional-expr", "flag", "function-param", "parenthesis-open", "type-param-constraint")},
+			Default(Pop(1), Push("optional-expr"), Push("flag"), Push("function-param"), Push("parenthesis-open"), Push("type-param-constraint")),
+		},
+		"optional-expr": {
+			Include("spaces"),
+			Include("expr"),
+			Default(Pop(1)),
+		},
+		"class-method": {
+			Include("spaces"),
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, NameFunction, Push("#pop", "optional-expr", "flag", "function-param", "parenthesis-open", "type-param-constraint")},
+		},
+		"function-param": {
+			Include("spaces"),
+			{`\)`, Punctuation, Pop(1)},
+			{`\?`, Punctuation, nil},
+			{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "function-param-sep", "assign", "flag")},
+		},
+		"function-param-sep": {
+			Include("spaces"),
+			{`\)`, Punctuation, Pop(1)},
+			{`,`, Punctuation, Push("#pop", "function-param")},
+		},
+		"prop-get-set": {
+			Include("spaces"),
+			{`\(`, Punctuation, Push("#pop", "parenthesis-close", "prop-get-set-opt", "comma", "prop-get-set-opt")},
+			Default(Pop(1)),
+		},
+		"prop-get-set-opt": {
+			Include("spaces"),
+			{`(?:default|null|never|dynamic|get|set)\b`, Keyword, Pop(1)},
+			{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Text, Pop(1)},
+		},
+		// Expressions: "expr" matches the head of an expression, then
+		// "expr-chain" handles trailing operators, field access, indexing
+		// and calls.
+		"expr-statement": {
+			Include("spaces"),
+			Default(Pop(1), Push("optional-semicolon"), Push("expr")),
+		},
+		"expr": {
+			Include("spaces"),
+			{`@`, NameDecorator, Push("#pop", "optional-expr", "meta-body", "meta-ident", "meta-colon")},
+			{`(?:\+\+|\-\-|~(?!/)|!|\-)`, Operator, nil},
+			{`\(`, Punctuation, Push("#pop", "expr-chain", "parenthesis")},
+			{`(?:static|public|private|override|dynamic|inline)\b`, KeywordDeclaration, nil},
+			{`(?:function)\b`, KeywordDeclaration, Push("#pop", "expr-chain", "function-local")},
+			{`\{`, Punctuation, Push("#pop", "expr-chain", "bracket")},
+			{`(?:true|false|null)\b`, KeywordConstant, Push("#pop", "expr-chain")},
+			{`(?:this)\b`, Keyword, Push("#pop", "expr-chain")},
+			{`(?:cast)\b`, Keyword, Push("#pop", "expr-chain", "cast")},
+			{`(?:try)\b`, Keyword, Push("#pop", "catch", "expr")},
+			{`(?:var)\b`, KeywordDeclaration, Push("#pop", "var")},
+			{`(?:new)\b`, Keyword, Push("#pop", "expr-chain", "new")},
+			{`(?:switch)\b`, Keyword, Push("#pop", "switch")},
+			{`(?:if)\b`, Keyword, Push("#pop", "if")},
+			{`(?:do)\b`, Keyword, Push("#pop", "do")},
+			{`(?:while)\b`, Keyword, Push("#pop", "while")},
+			{`(?:for)\b`, Keyword, Push("#pop", "for")},
+			{`(?:untyped|throw)\b`, Keyword, nil},
+			{`(?:return)\b`, Keyword, Push("#pop", "optional-expr")},
+			{`(?:macro)\b`, Keyword, Push("#pop", "macro")},
+			{`(?:continue|break)\b`, Keyword, Pop(1)},
+			{`(?:\$\s*[a-z]\b|\$(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)))`, Name, Push("#pop", "dollar")},
+			{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "expr-chain")},
+			{`\.[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")},
+			{`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")},
+			{`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")},
+			{`[0-9]+\.[0-9]+`, LiteralNumberFloat, Push("#pop", "expr-chain")},
+			{`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Push("#pop", "expr-chain")},
+			{`0x[0-9a-fA-F]+`, LiteralNumberHex, Push("#pop", "expr-chain")},
+			{`[0-9]+`, LiteralNumberInteger, Push("#pop", "expr-chain")},
+			{`'`, LiteralStringSingle, Push("#pop", "expr-chain", "string-single-interpol")},
+			{`"`, LiteralStringDouble, Push("#pop", "expr-chain", "string-double")},
+			{`~/(\\\\|\\/|[^/\n])*/[gimsu]*`, LiteralStringRegex, Push("#pop", "expr-chain")},
+			{`\[`, Punctuation, Push("#pop", "expr-chain", "array-decl")},
+		},
+		"expr-chain": {
+			Include("spaces"),
+			{`(?:\+\+|\-\-)`, Operator, nil},
+			{`(?:%=|&=|\|=|\^=|\+=|\-=|\*=|/=|<<=|>\s*>\s*=|>\s*>\s*>\s*=|==|!=|<=|>\s*=|&&|\|\||<<|>>>|>\s*>|\.\.\.|<|>|%|&|\||\^|\+|\*|/|\-|=>|=)`, Operator, Push("#pop", "expr")},
+			{`(?:in)\b`, Keyword, Push("#pop", "expr")},
+			{`\?`, Operator, Push("#pop", "expr", "ternary", "expr")},
+			{`(\.)((?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))`, ByGroups(Punctuation, Name), nil},
+			{`\[`, Punctuation, Push("array-access")},
+			{`\(`, Punctuation, Push("call")},
+			Default(Pop(1)),
+		},
+		"macro": {
+			Include("spaces"),
+			Include("meta"),
+			{`:`, Punctuation, Push("#pop", "type")},
+			{`(?:extern|private)\b`, KeywordDeclaration, nil},
+			{`(?:abstract)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "abstract")},
+			{`(?:class|interface)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "macro-class")},
+			{`(?:enum)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "enum")},
+			{`(?:typedef)\b`, KeywordDeclaration, Push("#pop", "optional-semicolon", "typedef")},
+			Default(Pop(1), Push("expr")),
+		},
+		"macro-class": {
+			{`\{`, Punctuation, Push("#pop", "class-body")},
+			Include("class"),
+		},
+		"cast": {
+			Include("spaces"),
+			{`\(`, Punctuation, Push("#pop", "parenthesis-close", "cast-type", "expr")},
+			Default(Pop(1), Push("expr")),
+		},
+		"cast-type": {
+			Include("spaces"),
+			{`,`, Punctuation, Push("#pop", "type")},
+			Default(Pop(1)),
+		},
+		// Control-flow statements.
+		"catch": {
+			Include("spaces"),
+			{`(?:catch)\b`, Keyword, Push("expr", "function-param", "parenthesis-open")},
+			Default(Pop(1)),
+		},
+		"do": {
+			Include("spaces"),
+			Default(Pop(1), Push("do-while"), Push("expr")),
+		},
+		"do-while": {
+			Include("spaces"),
+			{`(?:while)\b`, Keyword, Push("#pop", "parenthesis", "parenthesis-open")},
+		},
+		"while": {
+			Include("spaces"),
+			{`\(`, Punctuation, Push("#pop", "expr", "parenthesis")},
+		},
+		"for": {
+			Include("spaces"),
+			{`\(`, Punctuation, Push("#pop", "expr", "parenthesis")},
+		},
+		"if": {
+			Include("spaces"),
+			{`\(`, Punctuation, Push("#pop", "else", "optional-semicolon", "expr", "parenthesis")},
+		},
+		"else": {
+			Include("spaces"),
+			{`(?:else)\b`, Keyword, Push("#pop", "expr")},
+			Default(Pop(1)),
+		},
+		"switch": {
+			Include("spaces"),
+			Default(Pop(1), Push("switch-body"), Push("bracket-open"), Push("expr")),
+		},
+		"switch-body": {
+			Include("spaces"),
+			{`(?:case|default)\b`, Keyword, Push("case-block", "case")},
+			{`\}`, Punctuation, Pop(1)},
+		},
+		"case": {
+			Include("spaces"),
+			{`:`, Punctuation, Pop(1)},
+			Default(Pop(1), Push("case-sep"), Push("case-guard"), Push("expr")),
+		},
+		"case-sep": {
+			Include("spaces"),
+			{`:`, Punctuation, Pop(1)},
+			{`,`, Punctuation, Push("#pop", "case")},
+		},
+		"case-guard": {
+			Include("spaces"),
+			{`(?:if)\b`, Keyword, Push("#pop", "parenthesis", "parenthesis-open")},
+			Default(Pop(1)),
+		},
+		"case-block": {
+			Include("spaces"),
+			{`(?!(?:case|default)\b|\})`, Keyword, Push("expr-statement")},
+			Default(Pop(1)),
+		},
+		"new": {
+			Include("spaces"),
+			Default(Pop(1), Push("call"), Push("parenthesis-open"), Push("type")),
+		},
+		"array-decl": {
+			Include("spaces"),
+			{`\]`, Punctuation, Pop(1)},
+			Default(Pop(1), Push("array-decl-sep"), Push("expr")),
+		},
+		"array-decl-sep": {
+			Include("spaces"),
+			{`\]`, Punctuation, Pop(1)},
+			{`,`, Punctuation, Push("#pop", "array-decl")},
+		},
+		"array-access": {
+			Include("spaces"),
+			Default(Pop(1), Push("array-access-close"), Push("expr")),
+		},
+		"array-access-close": {
+			Include("spaces"),
+			{`\]`, Punctuation, Pop(1)},
+		},
+		// Single-token helper states.
+		"comma": {
+			Include("spaces"),
+			{`,`, Punctuation, Pop(1)},
+		},
+		"colon": {
+			Include("spaces"),
+			{`:`, Punctuation, Pop(1)},
+		},
+		"semicolon": {
+			Include("spaces"),
+			{`;`, Punctuation, Pop(1)},
+		},
+		"optional-semicolon": {
+			Include("spaces"),
+			{`;`, Punctuation, Pop(1)},
+			Default(Pop(1)),
+		},
+		"ident": {
+			Include("spaces"),
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Pop(1)},
+		},
+		"dollar": {
+			Include("spaces"),
+			{`\{`, Punctuation, Push("#pop", "expr-chain", "bracket-close", "expr")},
+			Default(Pop(1), Push("expr-chain")),
+		},
+		// Type expressions and type parameters.
+		"type-name": {
+			Include("spaces"),
+			{`_*[A-Z]\w*`, Name, Pop(1)},
+		},
+		"type-full-name": {
+			Include("spaces"),
+			{`\.`, Punctuation, Push("ident")},
+			Default(Pop(1)),
+		},
+		"type": {
+			Include("spaces"),
+			{`\?`, Punctuation, nil},
+			{`(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "type-check", "type-full-name")},
+			{`\{`, Punctuation, Push("#pop", "type-check", "type-struct")},
+			{`\(`, Punctuation, Push("#pop", "type-check", "type-parenthesis")},
+		},
+		"type-parenthesis": {
+			Include("spaces"),
+			Default(Pop(1), Push("parenthesis-close"), Push("type")),
+		},
+		"type-check": {
+			Include("spaces"),
+			{`->`, Punctuation, Push("#pop", "type")},
+			{`<(?!=)`, Punctuation, Push("type-param")},
+			Default(Pop(1)),
+		},
+		"type-struct": {
+			Include("spaces"),
+			{`\}`, Punctuation, Pop(1)},
+			{`\?`, Punctuation, nil},
+			{`>`, Punctuation, Push("comma", "type")},
+			{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "type-struct-sep", "type", "colon")},
+			Include("class-body"),
+		},
+		"type-struct-sep": {
+			Include("spaces"),
+			{`\}`, Punctuation, Pop(1)},
+			{`,`, Punctuation, Push("#pop", "type-struct")},
+		},
+		// Type parameters may also be constant expressions, hence the
+		// literal rules here.
+		"type-param-type": {
+			{`\.[0-9]+`, LiteralNumberFloat, Pop(1)},
+			{`[0-9]+[eE][+\-]?[0-9]+`, LiteralNumberFloat, Pop(1)},
+			{`[0-9]+\.[0-9]*[eE][+\-]?[0-9]+`, LiteralNumberFloat, Pop(1)},
+			{`[0-9]+\.[0-9]+`, LiteralNumberFloat, Pop(1)},
+			{`[0-9]+\.(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)|\.\.)`, LiteralNumberFloat, Pop(1)},
+			{`0x[0-9a-fA-F]+`, LiteralNumberHex, Pop(1)},
+			{`[0-9]+`, LiteralNumberInteger, Pop(1)},
+			{`'`, LiteralStringSingle, Push("#pop", "string-single")},
+			{`"`, LiteralStringDouble, Push("#pop", "string-double")},
+			{`~/(\\\\|\\/|[^/\n])*/[gim]*`, LiteralStringRegex, Pop(1)},
+			{`\[`, Operator, Push("#pop", "array-decl")},
+			Include("type"),
+		},
+		"type-param": {
+			Include("spaces"),
+			Default(Pop(1), Push("type-param-sep"), Push("type-param-type")),
+		},
+		"type-param-sep": {
+			Include("spaces"),
+			{`>`, Punctuation, Pop(1)},
+			{`,`, Punctuation, Push("#pop", "type-param")},
+		},
+		"type-param-constraint": {
+			Include("spaces"),
+			{`<(?!=)`, Punctuation, Push("#pop", "type-param-constraint-sep", "type-param-constraint-flag", "type-name")},
+			Default(Pop(1)),
+		},
+		"type-param-constraint-sep": {
+			Include("spaces"),
+			{`>`, Punctuation, Pop(1)},
+			{`,`, Punctuation, Push("#pop", "type-param-constraint-sep", "type-param-constraint-flag", "type-name")},
+		},
+		"type-param-constraint-flag": {
+			Include("spaces"),
+			{`:`, Punctuation, Push("#pop", "type-param-constraint-flag-type")},
+			Default(Pop(1)),
+		},
+		"type-param-constraint-flag-type": {
+			Include("spaces"),
+			{`\(`, Punctuation, Push("#pop", "type-param-constraint-flag-type-sep", "type")},
+			Default(Pop(1), Push("type")),
+		},
+		"type-param-constraint-flag-type-sep": {
+			Include("spaces"),
+			{`\)`, Punctuation, Pop(1)},
+			{`,`, Punctuation, Push("type")},
+		},
+		"parenthesis": {
+			Include("spaces"),
+			Default(Pop(1), Push("parenthesis-close"), Push("flag"), Push("expr")),
+		},
+		"parenthesis-open": {
+			Include("spaces"),
+			{`\(`, Punctuation, Pop(1)},
+		},
+		"parenthesis-close": {
+			Include("spaces"),
+			{`\)`, Punctuation, Pop(1)},
+		},
+		"var": {
+			Include("spaces"),
+			{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Text, Push("#pop", "var-sep", "assign", "flag", "prop-get-set")},
+		},
+		"var-sep": {
+			Include("spaces"),
+			{`,`, Punctuation, Push("#pop", "var")},
+			Default(Pop(1)),
+		},
+		"assign": {
+			Include("spaces"),
+			{`=`, Operator, Push("#pop", "expr")},
+			Default(Pop(1)),
+		},
+		"flag": {
+			Include("spaces"),
+			{`:`, Punctuation, Push("#pop", "type")},
+			Default(Pop(1)),
+		},
+		"ternary": {
+			Include("spaces"),
+			{`:`, Operator, Pop(1)},
+		},
+		"call": {
+			Include("spaces"),
+			{`\)`, Punctuation, Pop(1)},
+			Default(Pop(1), Push("call-sep"), Push("expr")),
+		},
+		"call-sep": {
+			Include("spaces"),
+			{`\)`, Punctuation, Pop(1)},
+			{`,`, Punctuation, Push("#pop", "call")},
+		},
+		// "{" is ambiguous between a block and an object literal;
+		// "bracket"/"bracket-check" disambiguate after the first token.
+		"bracket": {
+			Include("spaces"),
+			{`(?!(?:\$\s*[a-z]\b|\$(?!(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+))))(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Push("#pop", "bracket-check")},
+			{`'`, LiteralStringSingle, Push("#pop", "bracket-check", "string-single")},
+			{`"`, LiteralStringDouble, Push("#pop", "bracket-check", "string-double")},
+			Default(Pop(1), Push("block")),
+		},
+		"bracket-check": {
+			Include("spaces"),
+			{`:`, Punctuation, Push("#pop", "object-sep", "expr")},
+			Default(Pop(1), Push("block"), Push("optional-semicolon"), Push("expr-chain")),
+		},
+		"block": {
+			Include("spaces"),
+			{`\}`, Punctuation, Pop(1)},
+			Default(Push("expr-statement")),
+		},
+		"object": {
+			Include("spaces"),
+			{`\}`, Punctuation, Pop(1)},
+			Default(Pop(1), Push("object-sep"), Push("expr"), Push("colon"), Push("ident-or-string")),
+		},
+		"ident-or-string": {
+			Include("spaces"),
+			{`(?!(?:function|class|static|var|if|else|while|do|for|break|return|continue|extends|implements|import|switch|case|default|public|private|try|untyped|catch|new|this|throw|extern|enum|in|interface|cast|override|dynamic|typedef|package|inline|using|null|true|false|abstract)\b)(?:_*[a-z]\w*|_+[0-9]\w*|_*[A-Z]\w*|_+|\$\w+)`, Name, Pop(1)},
+			{`'`, LiteralStringSingle, Push("#pop", "string-single")},
+			{`"`, LiteralStringDouble, Push("#pop", "string-double")},
+		},
+		"object-sep": {
+			Include("spaces"),
+			{`\}`, Punctuation, Pop(1)},
+			{`,`, Punctuation, Push("#pop", "object")},
+		},
+	}
+}
+
+// haxePreProcMutator handles Haxe conditional-compilation directives
+// (#if/#elseif/#else/#end/#error), fired from the "spaces" state. It
+// keeps a stack of saved lexer state stacks under the "haxe-pre-proc"
+// key: #if saves the current stack, #else/#elseif restore the most
+// recently saved one, and #end discards it. #if and #elseif additionally
+// push "preproc-expr" to lex the condition; #error pushes
+// "preproc-error" to lex the message.
+//
+// NOTE(review): the saved entries alias state.Stack's backing array (no
+// copy is taken before appending) — presumably fine given how the states
+// mutate the stack, but worth confirming a later append cannot clobber a
+// saved snapshot.
+func haxePreProcMutator(state *LexerState) error {
+	stack, ok := state.Get("haxe-pre-proc").([][]string)
+	if !ok {
+		stack = [][]string{}
+	}
+
+	// Groups[2] is the directive word captured by `(#)(if|elseif|...)`.
+	proc := state.Groups[2]
+	switch proc {
+	case "if":
+		stack = append(stack, state.Stack)
+	case "else", "elseif":
+		if len(stack) > 0 {
+			state.Stack = stack[len(stack)-1]
+		}
+	case "end":
+		if len(stack) > 0 {
+			stack = stack[:len(stack)-1]
+		}
+	}
+
+	if proc == "if" || proc == "elseif" {
+		state.Stack = append(state.Stack, "preproc-expr")
+	}
+
+	if proc == "error" {
+		state.Stack = append(state.Stack, "preproc-error")
+	}
+	state.Set("haxe-pre-proc", stack)
+	return nil
+}
@@ -0,0 +1,8 @@
+package lexers
+
+import (
+ "github.com/alecthomas/chroma/v2"
+)
+
+// HTML is the HTML lexer, loaded from the embedded XML lexer definition.
+var HTML = chroma.MustNewXMLLexer(embedded, "embedded/html.xml")
@@ -0,0 +1,131 @@
+package lexers
+
+import (
+ "strings"
+
+ . "github.com/alecthomas/chroma/v2" // nolint
+)
+
+// HTTP lexer for raw HTTP request/response messages. It is wrapped by
+// httpBodyContentTypeLexer so that, once a Content-Type header has been
+// seen, the message body is re-tokenised with the lexer matching that
+// MIME type.
+var HTTP = Register(httpBodyContentTypeLexer(MustNewLexer(
+	&Config{
+		Name:         "HTTP",
+		Aliases:      []string{"http"},
+		Filenames:    []string{},
+		MimeTypes:    []string{},
+		NotMultiline: true,
+		DotAll:       true,
+	},
+	httpRules,
+)))
+
+// httpRules defines the lexer states for an HTTP message: a request line
+// or status line ("root"), then the header block, then an opaque body
+// (tokenised further by httpBodyContentTyper when a Content-Type is known).
+func httpRules() Rules {
+	return Rules{
+		"root": {
+			// Request line: METHOD SP request-target SP HTTP/version.
+			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([123](?:\.[01])?)(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
+			// Status line: HTTP/version SP status-code SP reason-phrase.
+			{`(HTTP)(/)([123](?:\.[01])?)( +)(\d{3})( *)([^\r\n]*)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
+		},
+		"headers": {
+			// "Name: value" header line.
+			{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
+			// Folded continuation line (starts with whitespace).
+			{`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil},
+			// Blank line terminates the header block; what follows is the body.
+			{`\r?\n`, Text, Push("content")},
+		},
+		"content": {
+			// DotAll is set, so this captures the whole remaining body.
+			{`.+`, EmitterFunc(httpContentBlock), nil},
+		},
+	}
+}
+
+// httpContentBlock emits the matched message body as a single Generic
+// token; httpBodyContentTyper may later re-lex it by Content-Type.
+func httpContentBlock(groups []string, state *LexerState) Iterator {
+	return Literator(Token{Generic, groups[0]})
+}
+
+// httpHeaderBlock tokenises one "Name: value" header line from its
+// regex capture groups.
+func httpHeaderBlock(groups []string, state *LexerState) Iterator {
+	return Literator(
+		Token{Name, groups[1]},     // header name
+		Token{Text, groups[2]},     // spaces before the colon
+		Token{Operator, groups[3]}, // the colon itself
+		Token{Text, groups[4]},     // spaces after the colon
+		Token{Literal, groups[5]},  // header value
+		Token{Text, groups[6]},     // line terminator
+	)
+}
+
+// httpContinuousHeaderBlock tokenises a folded header continuation line
+// (a line beginning with whitespace that extends the previous header).
+func httpContinuousHeaderBlock(groups []string, state *LexerState) Iterator {
+	return Literator(
+		Token{Text, groups[1]},    // leading whitespace
+		Token{Literal, groups[2]}, // continued header value
+		Token{Text, groups[3]},    // line terminator
+	)
+}
+
+// httpBodyContentTypeLexer wraps lexer so that the HTTP message body is
+// re-tokenised with the lexer matching the Content-Type header.
+func httpBodyContentTypeLexer(lexer Lexer) Lexer {
+	return &httpBodyContentTyper{Lexer: lexer}
+}
+
+// httpBodyContentTyper decorates an HTTP lexer, substituting a
+// Content-Type-specific sub-lexer for the message body tokens.
+type httpBodyContentTyper struct{ Lexer }
+
+// Tokenise delegates to the wrapped HTTP lexer while watching for a
+// Content-Type header; when the body (a Generic token) arrives, it is
+// re-tokenised with the lexer matching that MIME type, if one exists.
+func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
+	var contentType string   // value of the Content-Type header, once seen
+	var isContentType bool   // next Literal token is the Content-Type value
+	var subIterator Iterator // body tokens from the Content-Type sub-lexer
+
+	it, err := d.Lexer.Tokenise(options, text)
+	if err != nil {
+		return nil, err
+	}
+
+	return func() Token {
+		token := it()
+
+		if token == EOF {
+			// Outer lexer exhausted: drain the body sub-lexer, if any.
+			if subIterator != nil {
+				return subIterator()
+			}
+			return EOF
+		}
+
+		switch {
+		case token.Type == Name && strings.ToLower(token.Value) == "content-type":
+			{
+				// The following Literal token carries the header's value.
+				isContentType = true
+			}
+		case token.Type == Literal && isContentType:
+			{
+				isContentType = false
+				contentType = strings.TrimSpace(token.Value)
+				// Strip any parameters, e.g. "; charset=utf-8".
+				pos := strings.Index(contentType, ";")
+				if pos > 0 {
+					contentType = strings.TrimSpace(contentType[:pos])
+				}
+			}
+		case token.Type == Generic && contentType != "":
+			{
+				// Body token: pick a sub-lexer for the recorded MIME type.
+				lexer := MatchMimeType(contentType)
+
+				// application/calendar+xml can be treated as application/xml
+				// if there's not a better match.
+				if lexer == nil && strings.Contains(contentType, "+") {
+					slashPos := strings.Index(contentType, "/")
+					plusPos := strings.LastIndex(contentType, "+")
+					contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
+					lexer = MatchMimeType(contentType)
+				}
+
+				if lexer == nil {
+					// No sub-lexer: emit the body as plain text.
+					token.Type = Text
+				} else {
+					subIterator, err = lexer.Tokenise(nil, token.Value)
+					if err != nil {
+						panic(err)
+					}
+					// NOTE(review): returning EOF here hands control to the
+					// EOF branch above on subsequent calls; verify consumers
+					// keep iterating so the sub-lexer's tokens are emitted.
+					return EOF
+				}
+			}
+		}
+		return token
+	}, nil
+}